Oct 03 12:50:02 crc systemd[1]: Starting Kubernetes Kubelet...
Oct 03 12:50:02 crc restorecon[4686]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Oct 03 12:50:02 crc restorecon[4686]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc 
restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Oct 03 12:50:02 crc restorecon[4686]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Oct 03 12:50:02 crc 
restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:02 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Oct 03 12:50:03 crc restorecon[4686]: 
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 
12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Oct 03 12:50:03 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Oct 03 12:50:03 crc restorecon[4686]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Oct 03 12:50:04 crc kubenswrapper[4868]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.242800 4868 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251460 4868 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251507 4868 feature_gate.go:330] unrecognized feature gate: PinnedImages Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251513 4868 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251519 4868 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251523 4868 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251527 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251532 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251537 4868 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251542 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251546 4868 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251550 4868 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251555 4868 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251558 4868 feature_gate.go:330] unrecognized feature gate: Example Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251564 4868 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251572 4868 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251577 4868 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251583 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251588 4868 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251594 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251600 4868 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251607 4868 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251612 4868 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251618 4868 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251623 4868 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251629 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251633 4868 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251637 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251641 4868 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251645 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251649 4868 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251653 4868 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251657 4868 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251661 4868 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251665 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251670 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251674 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251677 4868 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251681 4868 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251685 4868 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251689 4868 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251693 4868 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251697 4868 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251701 4868 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251705 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251709 4868 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251713 4868 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251717 4868 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251721 4868 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251725 4868 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251729 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251735 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251739 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251744 4868 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251750 4868 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251755 4868 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251760 4868 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251764 4868 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251769 4868 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251773 4868 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251779 4868 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251783 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251788 4868 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251791 4868 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251796 4868 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251800 4868 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251805 4868 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251809 4868 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251812 4868 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251816 4868 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251819 4868 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.251823 4868 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253310 4868 flags.go:64] FLAG: --address="0.0.0.0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253328 4868 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253335 4868 flags.go:64] FLAG: --anonymous-auth="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253342 4868 flags.go:64] FLAG: --application-metrics-count-limit="100"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253349 4868 flags.go:64] FLAG: --authentication-token-webhook="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253354 4868 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253360 4868 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253367 4868 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253372 4868 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253376 4868 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253382 4868 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253394 4868 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253399 4868 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253404 4868 flags.go:64] FLAG: --cgroup-root=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253408 4868 flags.go:64] FLAG: --cgroups-per-qos="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253413 4868 flags.go:64] FLAG: --client-ca-file=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253417 4868 flags.go:64] FLAG: --cloud-config=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253422 4868 flags.go:64] FLAG: --cloud-provider=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253426 4868 flags.go:64] FLAG: --cluster-dns="[]"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253435 4868 flags.go:64] FLAG: --cluster-domain=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253441 4868 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253445 4868 flags.go:64] FLAG: --config-dir=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253450 4868 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253455 4868 flags.go:64] FLAG: --container-log-max-files="5"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253462 4868 flags.go:64] FLAG: --container-log-max-size="10Mi"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253466 4868 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253472 4868 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253476 4868 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253482 4868 flags.go:64] FLAG: --contention-profiling="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253486 4868 flags.go:64] FLAG: --cpu-cfs-quota="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253490 4868 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253495 4868 flags.go:64] FLAG: --cpu-manager-policy="none"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253499 4868 flags.go:64] FLAG: --cpu-manager-policy-options=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253506 4868 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253510 4868 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253515 4868 flags.go:64] FLAG: --enable-debugging-handlers="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253519 4868 flags.go:64] FLAG: --enable-load-reader="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253524 4868 flags.go:64] FLAG: --enable-server="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253528 4868 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253539 4868 flags.go:64] FLAG: --event-burst="100"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253543 4868 flags.go:64] FLAG: --event-qps="50"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253548 4868 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253553 4868 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253557 4868 flags.go:64] FLAG: --eviction-hard=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253563 4868 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253567 4868 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253572 4868 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253580 4868 flags.go:64] FLAG: --eviction-soft=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253584 4868 flags.go:64] FLAG: --eviction-soft-grace-period=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253588 4868 flags.go:64] FLAG: --exit-on-lock-contention="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253593 4868 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253598 4868 flags.go:64] FLAG: --experimental-mounter-path=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253602 4868 flags.go:64] FLAG: --fail-cgroupv1="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253607 4868 flags.go:64] FLAG: --fail-swap-on="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253611 4868 flags.go:64] FLAG: --feature-gates=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253617 4868 flags.go:64] FLAG: --file-check-frequency="20s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253622 4868 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253627 4868 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253631 4868 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253636 4868 flags.go:64] FLAG: --healthz-port="10248"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253640 4868 flags.go:64] FLAG: --help="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253645 4868 flags.go:64] FLAG: --hostname-override=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253650 4868 flags.go:64] FLAG: --housekeeping-interval="10s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253654 4868 flags.go:64] FLAG: --http-check-frequency="20s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253659 4868 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253663 4868 flags.go:64] FLAG: --image-credential-provider-config=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253668 4868 flags.go:64] FLAG: --image-gc-high-threshold="85"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253672 4868 flags.go:64] FLAG: --image-gc-low-threshold="80"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253677 4868 flags.go:64] FLAG: --image-service-endpoint=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253682 4868 flags.go:64] FLAG: --kernel-memcg-notification="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253686 4868 flags.go:64] FLAG: --kube-api-burst="100"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253691 4868 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253698 4868 flags.go:64] FLAG: --kube-api-qps="50"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253703 4868 flags.go:64] FLAG: --kube-reserved=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253708 4868 flags.go:64] FLAG: --kube-reserved-cgroup=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253713 4868 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253719 4868 flags.go:64] FLAG: --kubelet-cgroups=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253725 4868 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253759 4868 flags.go:64] FLAG: --lock-file=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253765 4868 flags.go:64] FLAG: --log-cadvisor-usage="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253771 4868 flags.go:64] FLAG: --log-flush-frequency="5s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253776 4868 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253788 4868 flags.go:64] FLAG: --log-json-split-stream="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253794 4868 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253799 4868 flags.go:64] FLAG: --log-text-split-stream="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253804 4868 flags.go:64] FLAG: --logging-format="text"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253808 4868 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253814 4868 flags.go:64] FLAG: --make-iptables-util-chains="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253818 4868 flags.go:64] FLAG: --manifest-url=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253823 4868 flags.go:64] FLAG: --manifest-url-header=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253830 4868 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253835 4868 flags.go:64] FLAG: --max-open-files="1000000"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253840 4868 flags.go:64] FLAG: --max-pods="110"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253845 4868 flags.go:64] FLAG: --maximum-dead-containers="-1"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253850 4868 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253854 4868 flags.go:64] FLAG: --memory-manager-policy="None"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253859 4868 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253863 4868 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253868 4868 flags.go:64] FLAG: --node-ip="192.168.126.11"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253873 4868 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253887 4868 flags.go:64] FLAG: --node-status-max-images="50"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253892 4868 flags.go:64] FLAG: --node-status-update-frequency="10s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253896 4868 flags.go:64] FLAG: --oom-score-adj="-999"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253902 4868 flags.go:64] FLAG: --pod-cidr=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253906 4868 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253914 4868 flags.go:64] FLAG: --pod-manifest-path=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253919 4868 flags.go:64] FLAG: --pod-max-pids="-1"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253924 4868 flags.go:64] FLAG: --pods-per-core="0"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253928 4868 flags.go:64] FLAG: --port="10250"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253933 4868 flags.go:64] FLAG: --protect-kernel-defaults="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253937 4868 flags.go:64] FLAG: --provider-id=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253943 4868 flags.go:64] FLAG: --qos-reserved=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253947 4868 flags.go:64] FLAG: --read-only-port="10255"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253952 4868 flags.go:64] FLAG: --register-node="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253956 4868 flags.go:64] FLAG: --register-schedulable="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253961 4868 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253970 4868 flags.go:64] FLAG: --registry-burst="10"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253976 4868 flags.go:64] FLAG: --registry-qps="5"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253981 4868 flags.go:64] FLAG: --reserved-cpus=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253987 4868 flags.go:64] FLAG: --reserved-memory=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253994 4868 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.253998 4868 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254003 4868 flags.go:64] FLAG: --rotate-certificates="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254008 4868 flags.go:64] FLAG: --rotate-server-certificates="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254012 4868 flags.go:64] FLAG: --runonce="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254017 4868 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254022 4868 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254027 4868 flags.go:64] FLAG: --seccomp-default="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254039 4868 flags.go:64] FLAG: --serialize-image-pulls="true"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254044 4868 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254068 4868 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254074 4868 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254079 4868 flags.go:64] FLAG: --storage-driver-password="root"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254084 4868 flags.go:64] FLAG: --storage-driver-secure="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254090 4868 flags.go:64] FLAG: --storage-driver-table="stats"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254095 4868 flags.go:64] FLAG: --storage-driver-user="root"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254102 4868 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254108 4868 flags.go:64] FLAG: --sync-frequency="1m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254112 4868 flags.go:64] FLAG: --system-cgroups=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254117 4868 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254127 4868 flags.go:64] FLAG: --system-reserved-cgroup=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254132 4868 flags.go:64] FLAG: --tls-cert-file=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254137 4868 flags.go:64] FLAG: --tls-cipher-suites="[]"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254144 4868 flags.go:64] FLAG: --tls-min-version=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254148 4868 flags.go:64] FLAG: --tls-private-key-file=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254153 4868 flags.go:64] FLAG: --topology-manager-policy="none"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254158 4868 flags.go:64] FLAG: --topology-manager-policy-options=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254162 4868 flags.go:64] FLAG: --topology-manager-scope="container"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254167 4868 flags.go:64] FLAG: --v="2"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254175 4868 flags.go:64] FLAG: --version="false"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254182 4868 flags.go:64] FLAG: --vmodule=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254188 4868 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.254193 4868 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254368 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254374 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254380 4868 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254385 4868 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254389 4868 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254393 4868 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254397 4868 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254404 4868 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254410 4868 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254414 4868 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254418 4868 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254422 4868 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254426 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254430 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254435 4868 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254439 4868 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254443 4868 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254447 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254452 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254458 4868 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254463 4868 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254467 4868 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254472 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254476 4868 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254480 4868 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254484 4868 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254488 4868 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254493 4868 feature_gate.go:330] unrecognized feature gate: Example
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254497 4868 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254502 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254506 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254509 4868 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254513 4868 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254517 4868 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254521 4868 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254525 4868 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254528 4868 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254532 4868 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254536 4868 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254542 4868 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254546 4868 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254550 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254553 4868 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254557 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254561 4868 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254565 4868 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254569 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254573 4868 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254576 4868 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254582 4868 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254587 4868 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254592 4868 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254596 4868 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254600 4868 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254603 4868 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254609 4868 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254613 4868 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254617 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254621 4868 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254625 4868 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254629 4868 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254634 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254638 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254643 4868 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254648 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254653 4868 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254657 4868 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254661 4868 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254666 4868 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254671 4868 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.254677 4868 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
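These long runs of "unrecognized feature gate" warnings are expected on OpenShift: the gate list comes from cluster-level configuration, while the kubelet only registers the upstream Kubernetes gates, so unknown names are logged and skipped rather than treated as fatal; the parsed map in the next entry keeps only the gates this binary knows. A minimal sketch of that warn-and-skip shape (illustrative only, not the real k8s.io/component-base/featuregate implementation):

    package main

    import "log"

    func main() {
        known := map[string]bool{ // gates registered with this binary
            "CloudDualStackNodeIPs":                  true,
            "DisableKubeletCloudCredentialProviders": true,
            "KMSv1":                                  true,
        }
        requested := map[string]bool{ // from cluster config / flags
            "KMSv1":              true,
            "MachineConfigNodes": true, // OpenShift-level gate, unknown here
        }
        effective := map[string]bool{}
        for name, enabled := range requested {
            if !known[name] {
                log.Printf("W unrecognized feature gate: %s", name) // warn, don't fail
                continue
            }
            effective[name] = enabled
        }
        log.Printf("feature gates: %v", effective)
    }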
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.255383 4868 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.267281 4868 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.267346 4868 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267488 4868 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267501 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267511 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267521 4868 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267529 4868 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267537 4868 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267545 4868 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267555 4868 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267566 4868 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267577 4868 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267588 4868 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267599 4868 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267608 4868 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267617 4868 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267625 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267634 4868 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267643 4868 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267651 4868 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267658 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267666 4868 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267678 4868 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267687 4868 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267696 4868 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267704 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267712 4868 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267720 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267728 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267736 4868 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267744 4868 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267755 4868 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267764 4868 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267775 4868 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267785 4868 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267793 4868 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267801 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267810 4868 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267818 4868 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267826 4868 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267835 4868 feature_gate.go:330] unrecognized feature gate: Example
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267844 4868 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267852 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267861 4868 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267869 4868 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267877 4868 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267884 4868 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267892 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267900 4868 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267908 4868 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267916 4868 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267924 4868 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267932 4868 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267940 4868 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267948 4868 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267956 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267964 4868 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267971 4868 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267979 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267990 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.267998 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268006 4868 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268013 4868 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268023 4868 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268032 4868 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268040 4868 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268048 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268084 4868 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268093 4868 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268101 4868 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268109 4868 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268116 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268124 4868 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.268171 4868 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268438 4868 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268450 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268459 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268468 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268475 4868 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268484 4868 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268493 4868 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268502 4868 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268510 4868 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268521 4868 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268533 4868 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268541 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268550 4868 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268559 4868 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268567 4868 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268577 4868 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268585 4868 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268594 4868 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268603 4868 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268612 4868 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268621 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268631 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268642 4868 feature_gate.go:330] unrecognized feature gate: SignatureStores
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268653 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268661 4868 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268669 4868 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268680 4868 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268690 4868 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268699 4868 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268709 4868 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268718 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268727 4868 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268736 4868 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268745 4868 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268754 4868 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268765 4868 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268775 4868 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268784 4868 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268792 4868 feature_gate.go:330] unrecognized feature gate: OVNObservability
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268801 4868 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268809 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268816 4868 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268825 4868 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268833 4868 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268841 4868 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268849 4868 feature_gate.go:330] unrecognized feature gate: NewOLM
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268857 4868 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268865 4868 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268873 4868 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268881 4868 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268889 4868 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268897 4868 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268905 4868 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268913 4868 feature_gate.go:330] unrecognized feature gate: Example
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268921 4868 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268931 4868 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268940 4868 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268948 4868 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268956 4868 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268964 4868 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268972 4868 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268980 4868 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268988 4868 feature_gate.go:330] unrecognized feature gate: PinnedImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.268996 4868 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269004 4868 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269014 4868 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269022 4868 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269030 4868 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269038 4868 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269046 4868 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.269078 4868 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.269092 4868 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.269399 4868 server.go:940] "Client rotation is on, will bootstrap in background"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.275424 4868 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.276372 4868 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
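The certificate_manager entries that follow schedule the next client-certificate rotation well before expiry: the log shows expiration at 2026-02-24 05:52:08 UTC but a rotation deadline of 2025-12-24, i.e. a jittered point late in the validity window, and the kubelet then simply sleeps until that deadline (the "Waiting 1961h18m50s" line). A sketch of that deadline arithmetic, assuming (as client-go's certificate manager does) a random fraction of the lifetime in roughly the 70-90% range and, purely for illustration, a one-year certificate:

    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    // rotationDeadline picks a jittered point late in the certificate's
    // validity window. The [0.7, 0.9) range models client-go's manager;
    // it is an assumption for illustration, not a copy of that code.
    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        lifetime := notAfter.Sub(notBefore)
        fraction := 0.7 + 0.2*rand.Float64() // somewhere in [0.7, 0.9)
        return notBefore.Add(time.Duration(float64(lifetime) * fraction))
    }

    func main() {
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:52:08Z") // expiry from the log
        notBefore := notAfter.AddDate(-1, 0, 0)                         // assumed one-year cert
        deadline := rotationDeadline(notBefore, notAfter)
        fmt.Println("rotation deadline:", deadline, "waiting:", time.Until(deadline))
    }

With these inputs the deadline lands in roughly December 2025, matching the logged deadline and the ~1961-hour wait.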
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.278158    4868 server.go:997] "Starting client certificate rotation"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.278202    4868 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.278438    4868 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-24 06:08:54.332128278 +0000 UTC
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.278652    4868 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1961h18m50.053483726s for next certificate rotation
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.312343    4868 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.321952    4868 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.345559    4868 log.go:25] "Validated CRI v1 runtime API"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.389633    4868 log.go:25] "Validated CRI v1 image API"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.392125    4868 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.399926    4868 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-10-03-12-44-57-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.399985    4868 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.437996    4868 manager.go:217] Machine: {Timestamp:2025-10-03 12:50:04.433887939 +0000 UTC m=+0.643737095 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:85afab4e-313e-432a-babc-46214b5eea5c BootID:29f6cb4f-8773-40be-94fe-aeed876e20ec Filesystems:[{Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:a1:83:14 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:a1:83:14 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:a8:3c:4f Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f0:46:b1 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:10:c7:cf Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:31:e0:45 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:16:a6:7c:e9:cd:a5 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:6a:8d:c7:0a:28:b6 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.438527    4868 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.438782    4868 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.439572    4868 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.439878    4868 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.439950    4868 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.440277    4868 topology_manager.go:138] "Creating topology manager with none policy"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.440292    4868 container_manager_linux.go:303] "Creating device plugin manager"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.441104    4868 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.441151    4868 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.443376    4868 state_mem.go:36] "Initialized new in-memory state store"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.443509    4868 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.448831    4868 kubelet.go:418] "Attempting to sync node with API server"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.448881    4868 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.448924    4868 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.448948    4868 kubelet.go:324] "Adding apiserver pod source"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.448967    4868 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.455168    4868 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.456843    4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.457118    4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError"
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.457213    4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.457387    4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.457830    4868 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.459429    4868 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461798    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461838    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461851    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461862    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461881    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461895    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461909    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461929    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461944    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.461959    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.462005    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.462021    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.464734    4868 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.465583    4868 server.go:1280] "Started kubelet"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.466968    4868 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.466957    4868 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.468147    4868 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Oct 03 12:50:04 crc systemd[1]: Started Kubernetes Kubelet.
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.478855    4868 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480325    4868 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480478    4868 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480539    4868 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 10:58:35.367227008 +0000 UTC
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480856    4868 volume_manager.go:287] "The desired_state_of_world populator starts"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480863    4868 server.go:460] "Adding debug handlers to kubelet server"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.482382    4868 volume_manager.go:289] "Starting Kubelet Volume Manager"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.480880    4868 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.480926    4868 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.482838    4868 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 838h8m30.885805445s for next certificate rotation
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.483561    4868 factory.go:55] Registering systemd factory
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.483618    4868 factory.go:221] Registration of the systemd container factory successfully
Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.482820    4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.483914    4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.484221    4868 factory.go:153] Registering CRI-O factory
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.484263    4868 factory.go:221] Registration of the crio container factory successfully
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.484557    4868 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.484626    4868 factory.go:103] Registering Raw factory
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.484672    4868 manager.go:1196] Started watching for new ooms in manager
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.485239    4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.58:6443: connect: connection refused" interval="200ms"
Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.484590    4868 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.58:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186afc1ca8f527f2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 12:50:04.465522674 +0000 UTC m=+0.675371780,LastTimestamp:2025-10-03 12:50:04.465522674 +0000 UTC m=+0.675371780,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.487355    4868 manager.go:319] Starting recovery of all containers
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494601    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494692    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494714    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494727    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494739    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494750    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494762    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494773    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494789    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494801    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494813    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494829    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494842    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494860    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494874    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494888    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494901    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494915    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494930    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494948    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.494990    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495005    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495018    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495033    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495067    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495083    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495098    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495111    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495126    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495141    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495164    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495184    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495197    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495210    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495225    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495236    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495248    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495260    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495273    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495285    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495298    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495311    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495324    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495336    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495348    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495365    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495383    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495398    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495413    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495431    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495447    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495460    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495481    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495496    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495518    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495531    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495548    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495565    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495579    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495593    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495609    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495625    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495640    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495655    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495669    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495682    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495696    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495711    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495726    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495739    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495753    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495765    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495775    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495786    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495796    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495807    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495818    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495835    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495848    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495861    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495876    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495891    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495907    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495920    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495931    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495942    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495957    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495968    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495980    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.495994    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496008    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496022    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496040    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496068    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496083    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496097    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496110    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496124    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496138    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496152    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496167    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496178    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496190    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.496201    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498628    4868 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498671    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498690    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498705    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498722    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498745    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498769    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498785    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498805    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498831    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498848    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498869    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498882    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498897    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498915    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498930    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498952    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498966    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.498987    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499000    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499018    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499034    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499066    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499080    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499169    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499281    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499305    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499323    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499340    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499356    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499372    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499396    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499414    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499432    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499448    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499470    4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499492 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499507 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499520 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499535 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499549 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499565 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499584 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499605 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499636 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499655 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499669 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499686 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499701 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499717 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499732 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499754 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499775 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499790 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499817 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499833 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499848 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499864 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499878 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499890 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499906 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499920 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499936 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499952 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499975 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.499992 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500006 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500021 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500035 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500049 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500086 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500101 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500114 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500130 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500146 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500161 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500175 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500190 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500205 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500220 4868 reconstruct.go:130] "Volume is marked as uncertain and added 
into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500238 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500253 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500267 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500286 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500337 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500355 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500370 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500384 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500399 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500417 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500431 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500444 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500460 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500478 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500498 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500518 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500541 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500560 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500581 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500600 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500631 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500656 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500680 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500701 4868 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500720 4868 reconstruct.go:97] "Volume reconstruction finished" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.500731 4868 reconciler.go:26] "Reconciler: start to sync state" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.511716 4868 manager.go:324] Recovery completed Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.524662 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.527851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.527886 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.527902 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.528622 4868 cpu_manager.go:225] "Starting CPU manager" policy="none" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.528633 4868 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.528655 4868 state_mem.go:36] "Initialized new in-memory state store" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.541070 4868 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.542700 4868 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.542749 4868 status_manager.go:217] "Starting to sync pod status with apiserver" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.542796 4868 kubelet.go:2335] "Starting kubelet main sync loop" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.543011 4868 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Oct 03 12:50:04 crc kubenswrapper[4868]: W1003 12:50:04.546993 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.547076 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.549601 4868 policy_none.go:49] "None policy: Start" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.551228 4868 memory_manager.go:170] "Starting memorymanager" policy="None" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.551256 4868 state_mem.go:35] "Initializing new in-memory state store" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.583429 4868 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.617579 4868 manager.go:334] "Starting Device Plugin manager" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.617674 4868 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.617692 4868 server.go:79] "Starting device plugin registration server" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.618351 4868 eviction_manager.go:189] "Eviction manager: starting control loop" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.618374 4868 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.618662 4868 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.618784 4868 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.618809 4868 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.626355 4868 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.643673 4868 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 12:50:04 crc kubenswrapper[4868]: 
I1003 12:50:04.643835 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.647662 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.647714 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.647729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.647986 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.648306 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.648354 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.649793 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.649825 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.649842 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650264 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650311 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650324 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650551 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650679 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.650734 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.651581 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.651635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.651648 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.651876 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.652020 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.652084 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.652297 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.652332 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.652350 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653035 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653087 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653100 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653046 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653282 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653579 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653639 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.653681 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654407 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654451 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654751 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654828 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.654901 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.655146 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.655236 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.656130 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.656160 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.656172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.686648 4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.58:6443: connect: connection refused" interval="400ms" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.702474 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.702647 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.702753 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.702848 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.702941 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703081 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703177 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703257 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703340 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703419 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703493 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703556 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703687 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703754 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.703831 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.720117 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.721282 4868 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.721316 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.721326 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.721348 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.721677 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.58:6443: connect: connection refused" node="crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.804798 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805131 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805158 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805181 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805200 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805221 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805244 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805254 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805266 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.804860 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805221 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805302 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805244 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805332 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805346 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805360 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805375 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805391 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805392 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805407 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805421 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805435 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805441 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805456 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805474 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805490 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805530 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " 
pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805429 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805557 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.805646 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.921770 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.923923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.924094 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.924184 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.924278 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:04 crc kubenswrapper[4868]: E1003 12:50:04.925252 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.58:6443: connect: connection refused" node="crc" Oct 03 12:50:04 crc kubenswrapper[4868]: I1003 12:50:04.994987 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.005917 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.032921 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.042423 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.054226 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-85213156c9a92f716a8ea223f377fb23f5ec827369fd85ebdf61492a97430fd7 WatchSource:0}: Error finding container 85213156c9a92f716a8ea223f377fb23f5ec827369fd85ebdf61492a97430fd7: Status 404 returned error can't find the container with id 85213156c9a92f716a8ea223f377fb23f5ec827369fd85ebdf61492a97430fd7 Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.054781 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-cbac3d36b617fc81cae6c39c02b88e015a0fe3042b74d5a6875396b0eea4dbd8 WatchSource:0}: Error finding container cbac3d36b617fc81cae6c39c02b88e015a0fe3042b74d5a6875396b0eea4dbd8: Status 404 returned error can't find the container with id cbac3d36b617fc81cae6c39c02b88e015a0fe3042b74d5a6875396b0eea4dbd8 Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.060153 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-3417b3bc1831194f90b9016f99723ac64b635e2ce07e1b7cebe3b72cd61aac5a WatchSource:0}: Error finding container 3417b3bc1831194f90b9016f99723ac64b635e2ce07e1b7cebe3b72cd61aac5a: Status 404 returned error can't find the container with id 3417b3bc1831194f90b9016f99723ac64b635e2ce07e1b7cebe3b72cd61aac5a Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.060607 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5edf6ea4821dc3c7420e31232d3b2d65bca8f293b755019f26c65beee0445fd5 WatchSource:0}: Error finding container 5edf6ea4821dc3c7420e31232d3b2d65bca8f293b755019f26c65beee0445fd5: Status 404 returned error can't find the container with id 5edf6ea4821dc3c7420e31232d3b2d65bca8f293b755019f26c65beee0445fd5 Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.062501 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.077322 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-487bafca21ab740c04b144e0dfcbe517f6793acbc8f209e387f60faad8e1194b WatchSource:0}: Error finding container 487bafca21ab740c04b144e0dfcbe517f6793acbc8f209e387f60faad8e1194b: Status 404 returned error can't find the container with id 487bafca21ab740c04b144e0dfcbe517f6793acbc8f209e387f60faad8e1194b Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.087667 4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.58:6443: connect: connection refused" interval="800ms" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.326131 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.327728 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.327779 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.327794 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.327822 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.328434 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.58:6443: connect: connection refused" node="crc" Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.435309 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.435422 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.454409 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.454468 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 
12:50:05.480576 4868 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.550772 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"487bafca21ab740c04b144e0dfcbe517f6793acbc8f209e387f60faad8e1194b"} Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.553360 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5edf6ea4821dc3c7420e31232d3b2d65bca8f293b755019f26c65beee0445fd5"} Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.555181 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"3417b3bc1831194f90b9016f99723ac64b635e2ce07e1b7cebe3b72cd61aac5a"} Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.557134 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cbac3d36b617fc81cae6c39c02b88e015a0fe3042b74d5a6875396b0eea4dbd8"} Oct 03 12:50:05 crc kubenswrapper[4868]: I1003 12:50:05.558631 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"85213156c9a92f716a8ea223f377fb23f5ec827369fd85ebdf61492a97430fd7"} Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.571592 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.571708 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:05 crc kubenswrapper[4868]: W1003 12:50:05.664622 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.664713 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:05 crc kubenswrapper[4868]: E1003 12:50:05.888923 4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.58:6443: connect: connection refused" interval="1.6s" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.129220 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.130806 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.130911 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.130924 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.130962 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:06 crc kubenswrapper[4868]: E1003 12:50:06.131476 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.58:6443: connect: connection refused" node="crc" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.480108 4868 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.565126 4868 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5" exitCode=0 Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.565231 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.565338 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.567923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.567996 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.568009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.568469 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.568533 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 
12:50:06.568548 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.568555 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.568829 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.569660 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.569733 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.569747 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.570538 4868 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb" exitCode=0 Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.570636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.570712 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.574634 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.574709 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.574736 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.576376 4868 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="883d06c702a32c08716e17aded638184cc0d1677c128c9d5a5b348d276b220f6" exitCode=0 Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.576502 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"883d06c702a32c08716e17aded638184cc0d1677c128c9d5a5b348d276b220f6"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.576561 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.577311 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578368 4868 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578432 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578682 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.578746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.579807 4868 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6" exitCode=0 Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.579855 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6"} Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.579882 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.581220 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.581410 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:06 crc kubenswrapper[4868]: I1003 12:50:06.581436 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.479843 4868 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:07 crc kubenswrapper[4868]: E1003 12:50:07.490547 4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.58:6443: connect: connection refused" interval="3.2s" Oct 03 12:50:07 crc kubenswrapper[4868]: W1003 12:50:07.578392 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:07 crc kubenswrapper[4868]: E1003 12:50:07.578521 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:07 crc kubenswrapper[4868]: 
I1003 12:50:07.587083 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.587151 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.587166 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.587179 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.588531 4868 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="5d35d1e1ab2c0d69d28fe4f62f93806def95d4d9f1bb5a4670f6b1027c676b74" exitCode=0 Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.588596 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"5d35d1e1ab2c0d69d28fe4f62f93806def95d4d9f1bb5a4670f6b1027c676b74"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.588758 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.589910 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.589948 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.589963 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.591641 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.591720 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.592829 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.592854 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.592867 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.596809 4868 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.596848 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.596889 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2"} Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.596892 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.596927 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599284 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599301 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599337 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599353 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599355 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.599372 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: W1003 12:50:07.634246 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:07 crc kubenswrapper[4868]: E1003 12:50:07.634346 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:07 crc kubenswrapper[4868]: E1003 12:50:07.644949 4868 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.58:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.186afc1ca8f527f2 default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting 
kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-10-03 12:50:04.465522674 +0000 UTC m=+0.675371780,LastTimestamp:2025-10-03 12:50:04.465522674 +0000 UTC m=+0.675371780,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.732227 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.733626 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.733681 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.733702 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:07 crc kubenswrapper[4868]: I1003 12:50:07.733737 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:07 crc kubenswrapper[4868]: E1003 12:50:07.734478 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.58:6443: connect: connection refused" node="crc" Oct 03 12:50:08 crc kubenswrapper[4868]: W1003 12:50:08.189210 4868 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.58:6443: connect: connection refused Oct 03 12:50:08 crc kubenswrapper[4868]: E1003 12:50:08.189347 4868 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.58:6443: connect: connection refused" logger="UnhandledError" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.603169 4868 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="beb2747d503d13132309238acda2b7f7d320e336add735df31dc3a1e4bf57ad3" exitCode=0 Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.603288 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"beb2747d503d13132309238acda2b7f7d320e336add735df31dc3a1e4bf57ad3"} Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.603323 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.604261 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.604301 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.604358 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.605046 4868 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.607951 4868 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="11fec2aab4cc8eda538f1b4dade56f4734dca4f737a0ad15cc5c1852e78bb6d7" exitCode=255 Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.608089 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.608131 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"11fec2aab4cc8eda538f1b4dade56f4734dca4f737a0ad15cc5c1852e78bb6d7"} Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.608673 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.609512 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.609714 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.610525 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.610573 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.610618 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.611670 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.611717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.611834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.611972 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.612021 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.612040 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:08 crc kubenswrapper[4868]: I1003 12:50:08.613949 4868 scope.go:117] "RemoveContainer" containerID="11fec2aab4cc8eda538f1b4dade56f4734dca4f737a0ad15cc5c1852e78bb6d7" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.292770 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.293102 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.295200 4868 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.295259 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.295272 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.613513 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.616444 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de"} Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.616528 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.617439 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.617477 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.617489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.620766 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7385ab2a2940b7e4e47f062ded591b52f9eb6f3dc90f1095b4c08144f32b6df2"} Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.620799 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7fc7052b600348e57d0eafebb67878c2f4c169fdb22b7b6927d38c26ae3517cc"} Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.620813 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d470810f02379dab0231b7cecebe266614e1be08d1235691a1d44f3e9d34086d"} Oct 03 12:50:09 crc kubenswrapper[4868]: I1003 12:50:09.620825 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"924cc63fe362b1cd649fe1840583c63667b6edd68769fd2140fc39148d541506"} Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.630422 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"1744771e345dba247a9f2e35f33bf5091b1493dba1446cbac8632c40a8a77367"} Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.630506 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.630449 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.630686 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume 
controller attach/detach" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.631957 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.632030 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.632086 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.632896 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.632978 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.633002 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.934858 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.936746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.936808 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.936825 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:10 crc kubenswrapper[4868]: I1003 12:50:10.936860 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.137925 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.138222 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.140192 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.140251 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.140273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.220304 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.592531 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.592797 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.594836 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.594910 4868 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.594934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.634163 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.634251 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.634352 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.635814 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.635866 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.635883 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.636242 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.636315 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:11 crc kubenswrapper[4868]: I1003 12:50:11.636342 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.370632 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.637840 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.637906 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.639166 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.639216 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:12 crc kubenswrapper[4868]: I1003 12:50:12.639226 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.194550 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.194772 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.197175 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.197273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 
12:50:13.197297 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.200611 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.640767 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.642233 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.642288 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:13 crc kubenswrapper[4868]: I1003 12:50:13.642302 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.051515 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.051772 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.053155 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.053228 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.053251 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.592707 4868 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Oct 03 12:50:14 crc kubenswrapper[4868]: I1003 12:50:14.592834 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 12:50:14 crc kubenswrapper[4868]: E1003 12:50:14.626541 4868 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.311177 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.311398 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.313267 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.313331 4868 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.313344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.317821 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.346660 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.346918 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.348355 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.348413 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.348427 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.647430 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.648419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.648454 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:15 crc kubenswrapper[4868]: I1003 12:50:15.648465 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:18 crc kubenswrapper[4868]: I1003 12:50:18.481360 4868 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Oct 03 12:50:18 crc kubenswrapper[4868]: I1003 12:50:18.644127 4868 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 03 12:50:18 crc kubenswrapper[4868]: I1003 12:50:18.644204 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 03 12:50:18 crc kubenswrapper[4868]: I1003 12:50:18.649162 4868 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Oct 03 12:50:18 crc kubenswrapper[4868]: I1003 12:50:18.649248 4868 prober.go:107] "Probe 
failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.343659 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.344007 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.345457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.345538 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.345553 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.383361 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.661911 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.662835 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.662870 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.662881 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:20 crc kubenswrapper[4868]: I1003 12:50:20.678123 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Oct 03 12:50:21 crc kubenswrapper[4868]: I1003 12:50:21.664885 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:21 crc kubenswrapper[4868]: I1003 12:50:21.666347 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:21 crc kubenswrapper[4868]: I1003 12:50:21.666401 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:21 crc kubenswrapper[4868]: I1003 12:50:21.666416 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.376759 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.377322 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.377680 4868 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.377746 4868 
prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.378703 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.378832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.378917 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.382645 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.667394 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.668224 4868 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.668303 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.668699 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.668757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:22 crc kubenswrapper[4868]: I1003 12:50:22.668771 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:23 crc kubenswrapper[4868]: E1003 12:50:23.632819 4868 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.633449 4868 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Oct 03 12:50:23 crc kubenswrapper[4868]: E1003 12:50:23.635974 4868 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.638556 4868 trace.go:236] Trace[2147264096]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 12:50:10.797) (total time: 12841ms): Oct 03 12:50:23 crc kubenswrapper[4868]: Trace[2147264096]: ---"Objects listed" error: 12841ms (12:50:23.638) Oct 03 12:50:23 crc 
kubenswrapper[4868]: Trace[2147264096]: [12.841328145s] [12.841328145s] END Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.638600 4868 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.638575 4868 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.639688 4868 trace.go:236] Trace[1837110424]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 12:50:08.642) (total time: 14996ms): Oct 03 12:50:23 crc kubenswrapper[4868]: Trace[1837110424]: ---"Objects listed" error: 14996ms (12:50:23.639) Oct 03 12:50:23 crc kubenswrapper[4868]: Trace[1837110424]: [14.996748394s] [14.996748394s] END Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.639703 4868 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.643959 4868 trace.go:236] Trace[663171156]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (03-Oct-2025 12:50:13.502) (total time: 10141ms): Oct 03 12:50:23 crc kubenswrapper[4868]: Trace[663171156]: ---"Objects listed" error: 10141ms (12:50:23.643) Oct 03 12:50:23 crc kubenswrapper[4868]: Trace[663171156]: [10.141731303s] [10.141731303s] END Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.644014 4868 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.725332 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:23 crc kubenswrapper[4868]: I1003 12:50:23.729817 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.052887 4868 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.053285 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.462446 4868 apiserver.go:52] "Watching apiserver" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.464975 4868 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.465298 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Oct 
03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.465636 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.465712 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.465727 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.465913 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.465933 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.465971 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.466350 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.466355 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.466452 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.468029 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.469164 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.469383 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.469519 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.470064 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.470066 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.470956 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.470985 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.471233 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.483581 4868 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.495599 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.505011 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.515485 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.525867 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cer
t-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.534778 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.544731 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.544891 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545041 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545190 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545309 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545402 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 12:50:24 crc 
kubenswrapper[4868]: I1003 12:50:24.545526 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545861 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545688 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545773 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545787 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545852 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545957 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545799 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.545985 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546207 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546271 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546306 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546328 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546348 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546377 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546396 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546446 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546475 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: 
\"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546496 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546516 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546537 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546557 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546577 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546591 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546608 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546639 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546973 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547008 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547092 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547113 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547166 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547214 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547332 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547361 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547386 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547437 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547464 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547535 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547571 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547588 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547604 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547622 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547638 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547675 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547704 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547730 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547763 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547786 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547807 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547832 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547855 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547876 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547892 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547908 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547923 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547948 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547963 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547979 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548004 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548030 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548046 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548081 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548099 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548115 4868 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548275 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548294 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548314 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548336 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548358 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548380 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548402 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548432 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548455 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 12:50:24 crc kubenswrapper[4868]: 
I1003 12:50:24.548471 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548488 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548507 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548526 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548545 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548563 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548584 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548605 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548622 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548641 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 12:50:24 crc 
kubenswrapper[4868]: I1003 12:50:24.548658 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548673 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548689 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548706 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548724 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548739 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548754 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548772 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548790 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548806 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod 
\"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548822 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548838 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548854 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548870 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548886 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548914 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548953 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548973 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549013 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549031 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod 
\"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549049 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549089 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549106 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549122 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549138 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549165 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549184 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549201 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549218 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549234 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod 
\"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549255 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549273 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549298 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549313 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549330 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549347 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549364 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549382 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549400 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549420 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" 
(UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549459 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549476 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549494 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549511 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549527 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549545 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549561 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549576 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549593 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549610 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549626 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549642 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549672 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549690 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549704 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549720 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549740 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549766 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549784 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549808 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" 
(UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549828 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549847 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549863 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549880 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549944 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549962 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.549982 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550001 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550019 4868 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550045 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550089 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550108 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550125 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550143 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550170 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550188 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550207 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550225 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550243 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550262 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550279 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550297 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550318 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550334 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550351 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550370 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550388 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550420 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550447 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550465 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550482 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550540 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550560 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550583 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550601 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550622 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550653 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550670 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550687 4868 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550707 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550736 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550779 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550801 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550825 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550854 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550875 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550901 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550921 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc 
kubenswrapper[4868]: I1003 12:50:24.550952 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550973 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551001 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551019 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551039 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551073 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551135 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551163 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551188 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551209 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551238 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551263 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551282 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551311 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551331 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551370 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551397 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551423 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " 
pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551454 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551475 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551547 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551559 4868 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551569 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551580 4868 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551592 4868 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551602 4868 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551614 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551626 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.546602 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551781 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547002 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547411 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547248 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.547830 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548109 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548103 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548183 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548450 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548547 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.548575 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550111 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550511 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550513 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550701 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550837 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.550984 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551028 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551231 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551269 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551284 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551427 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552028 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551712 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551763 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.551859 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552053 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552393 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552488 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552442 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552569 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552573 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.552642 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.553892 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.555508 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.555747 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.555951 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.557033 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.557159 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.557651 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.557841 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558009 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558082 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558365 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558659 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). 
InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558833 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.558859 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.559010 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.559221 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.559440 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.560631 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.560758 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.561223 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.562347 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.562330 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.562624 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.562803 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563154 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563164 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563077 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563461 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563386 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.563539 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:25.063499889 +0000 UTC m=+21.273348975 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563645 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563893 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563911 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563914 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.563899 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564142 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564180 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564205 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564231 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564434 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564623 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.564961 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.565189 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.565232 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.565930 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.565972 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566013 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566040 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566080 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566144 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566225 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.565446 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566530 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566880 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.566813 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.567181 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.567733 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.567922 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568078 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568116 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568162 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568453 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568562 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568588 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.568599 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.569222 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.569251 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.569277 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.569375 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:25.069338501 +0000 UTC m=+21.279187567 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.569420 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.569459 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.570483 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.570704 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.570723 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.570964 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571020 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571119 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571343 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571440 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571451 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571607 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.571953 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.572439 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.574548 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.574543 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.574876 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.575354 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.578551 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.578783 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.578894 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.579079 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.579317 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.579749 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.580045 4868 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.580307 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.580786 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.583805 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.584292 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.585440 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.585556 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.585614 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:25.085595167 +0000 UTC m=+21.295444233 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.585864 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.586291 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.587136 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.587560 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.588134 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.588593 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.588635 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.588647 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:25.088635071 +0000 UTC m=+21.298484137 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.589076 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.589187 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.589400 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.589970 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.590429 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.590781 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591205 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591263 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591441 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591637 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591516 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591827 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591739 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.592272 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). 
InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.591913 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.592517 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.592984 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.593924 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594005 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594008 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594441 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594445 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594773 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.594920 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.596890 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.599794 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.602290 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.609400 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.611223 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). 
InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.613161 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.613822 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.613865 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.613883 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.613963 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:25.113934856 +0000 UTC m=+21.323784112 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.615216 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.615405 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.617189 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.617286 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.617560 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.618694 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.619773 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.619985 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.620441 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.620573 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.621608 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.622398 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.622583 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.622712 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.622791 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.623316 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.623426 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.623509 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.623519 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.623878 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.624204 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.624248 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.624716 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.626241 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.626836 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.627034 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.627892 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.631184 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.631536 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.633574 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.633840 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.652440 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653396 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653447 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653535 4868 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653553 4868 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653565 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653577 4868 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653591 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653603 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653615 4868 reconciler_common.go:293] "Volume 
detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653628 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653640 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653655 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653667 4868 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653678 4868 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653690 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653703 4868 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653715 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653727 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653740 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653752 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653764 4868 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: 
I1003 12:50:24.653775 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653786 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653797 4868 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653808 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653820 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653831 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653844 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653857 4868 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653869 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653880 4868 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653891 4868 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653903 4868 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653914 4868 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653927 4868 reconciler_common.go:293] "Volume detached for 
volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653939 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653951 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653963 4868 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653975 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.653988 4868 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654000 4868 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654015 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654027 4868 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654041 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654074 4868 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654087 4868 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654099 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654112 4868 reconciler_common.go:293] "Volume detached 
for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654124 4868 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654136 4868 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654147 4868 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654158 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654171 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654183 4868 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654195 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654206 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654221 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654232 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654243 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654257 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654270 4868 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654282 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654292 4868 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654303 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654314 4868 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654329 4868 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654343 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654356 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654369 4868 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654382 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654394 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654406 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654417 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654429 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: 
\"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654443 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654455 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654468 4868 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654479 4868 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654491 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654503 4868 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654515 4868 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654519 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654562 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654578 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654595 4868 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654609 4868 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654619 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654627 4868 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654637 4868 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654647 4868 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654656 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654664 4868 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654674 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654683 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654692 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654703 4868 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654712 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654728 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654737 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on 
node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654781 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654793 4868 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654802 4868 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654810 4868 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654818 4868 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654827 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654836 4868 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654845 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654854 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654864 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654872 4868 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654881 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654892 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node 
\"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654900 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654909 4868 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654917 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654926 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654936 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654945 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654954 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654963 4868 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654971 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654980 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654990 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.654997 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655007 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: 
\"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655016 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655026 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655035 4868 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655045 4868 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655073 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655082 4868 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655091 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655101 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655110 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655120 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655130 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655145 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655154 4868 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655163 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655172 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655181 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655191 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655202 4868 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655214 4868 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655228 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655237 4868 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655248 4868 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655258 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655268 4868 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655277 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655289 4868 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655299 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655308 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655317 4868 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655330 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655351 4868 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655408 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655421 4868 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655430 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655439 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655448 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655457 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655466 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655476 4868 reconciler_common.go:293] "Volume detached for 
volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655485 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655495 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655508 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655517 4868 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655527 4868 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655536 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655545 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655555 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655567 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655575 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655586 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655593 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655602 4868 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655613 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655622 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655632 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655640 4868 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655650 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655659 4868 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655667 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655676 4868 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655685 4868 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655694 4868 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.655704 4868 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.670015 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.673764 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.681908 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.685496 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.685906 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.688556 4868 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de" exitCode=255 Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.689077 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de"} Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.689162 4868 scope.go:117] "RemoveContainer" containerID="11fec2aab4cc8eda538f1b4dade56f4734dca4f737a0ad15cc5c1852e78bb6d7" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.696497 4868 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.701199 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.703634 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.703773 4868 scope.go:117] "RemoveContainer" containerID="8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de" Oct 03 12:50:24 crc kubenswrapper[4868]: E1003 12:50:24.704026 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.715190 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.731699 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.746558 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.756987 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.757018 4868 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 
12:50:24.760913 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.776348 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.779725 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.787896 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.792336 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Oct 03 12:50:24 crc kubenswrapper[4868]: W1003 12:50:24.796344 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-4855e66ec121e98ad919d8ed5f6351a47a559d6c4591bde4079413066cab0b96 WatchSource:0}: Error finding container 4855e66ec121e98ad919d8ed5f6351a47a559d6c4591bde4079413066cab0b96: Status 404 returned error can't find the container with id 4855e66ec121e98ad919d8ed5f6351a47a559d6c4591bde4079413066cab0b96 Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.796669 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kube
rnetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://11fec2aab4cc8eda538f1b4dade56f4734dca4f737a0ad15cc5c1852e78bb6d7\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:08Z\\\",\\\"message\\\":\\\"W1003 12:50:07.804675 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1003 12:50:07.805008 1 crypto.go:601] Generating new CA for check-endpoints-signer@1759495807 cert, and key in /tmp/serving-cert-3405846701/serving-signer.crt, /tmp/serving-cert-3405846701/serving-signer.key\\\\nI1003 12:50:08.036672 1 observer_polling.go:159] Starting file observer\\\\nW1003 12:50:08.040822 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1003 12:50:08.041003 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:08.042784 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3405846701/tls.crt::/tmp/serving-cert-3405846701/tls.key\\\\\\\"\\\\nF1003 12:50:08.274501 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 
4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: W1003 12:50:24.804795 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-63cc490599a5a0f4e3e3ba7beee1ef772706fbb00c0ae32fd42ac9031f3831e0 WatchSource:0}: Error finding container 63cc490599a5a0f4e3e3ba7beee1ef772706fbb00c0ae32fd42ac9031f3831e0: Status 404 returned error can't find the container with id 63cc490599a5a0f4e3e3ba7beee1ef772706fbb00c0ae32fd42ac9031f3831e0 Oct 03 12:50:24 crc kubenswrapper[4868]: W1003 12:50:24.805404 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-b97f3926e260175943426130f66eca0e72077b5a21266ebb887ac9a83aa88317 WatchSource:0}: Error finding container b97f3926e260175943426130f66eca0e72077b5a21266ebb887ac9a83aa88317: Status 404 returned error can't find the container with id b97f3926e260175943426130f66eca0e72077b5a21266ebb887ac9a83aa88317 Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.810880 4868 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.822980 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.836991 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.849618 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.859203 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.873018 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:24 crc kubenswrapper[4868]: I1003 12:50:24.886605 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.160721 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.160823 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.160869 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.160899 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.160942 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:26.16089699 +0000 UTC m=+22.370746056 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.161000 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161102 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161208 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161250 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:26.161205148 +0000 UTC m=+22.371054404 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161112 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161295 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:26.161270099 +0000 UTC m=+22.371119175 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161322 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161344 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161137 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161389 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:26.161378312 +0000 UTC m=+22.371227598 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161403 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161426 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.161468 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:26.161457634 +0000 UTC m=+22.371306980 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.167254 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.543913 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.544107 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.693552 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.696870 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b97f3926e260175943426130f66eca0e72077b5a21266ebb887ac9a83aa88317"} Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.697426 4868 scope.go:117] "RemoveContainer" containerID="8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de" Oct 03 12:50:25 crc kubenswrapper[4868]: E1003 12:50:25.697910 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.697929 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"63cc490599a5a0f4e3e3ba7beee1ef772706fbb00c0ae32fd42ac9031f3831e0"} Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.698996 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"4855e66ec121e98ad919d8ed5f6351a47a559d6c4591bde4079413066cab0b96"} Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.964546 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-cvsg7"] Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.965560 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.970813 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.970852 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.970951 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshi
ft-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:25 crc kubenswrapper[4868]: I1003 12:50:25.973461 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.014052 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.044592 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.070254 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6930e7fb-9970-470b-b08c-8560249f8597-hosts-file\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.070494 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjg6c\" (UniqueName: \"kubernetes.io/projected/6930e7fb-9970-470b-b08c-8560249f8597-kube-api-access-vjg6c\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.080429 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.109645 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.133523 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-de
v@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.148592 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.158291 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171175 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171253 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171347 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171371 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171387 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171407 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6930e7fb-9970-470b-b08c-8560249f8597-hosts-file\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.171422 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjg6c\" (UniqueName: 
\"kubernetes.io/projected/6930e7fb-9970-470b-b08c-8560249f8597-kube-api-access-vjg6c\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172248 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:28.172207609 +0000 UTC m=+24.382056675 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.172259 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/6930e7fb-9970-470b-b08c-8560249f8597-hosts-file\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172270 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172390 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172412 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172411 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172493 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:28.172467225 +0000 UTC m=+24.382316291 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172550 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:28.172517556 +0000 UTC m=+24.382366622 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172327 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.172616 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:28.172606028 +0000 UTC m=+24.382455094 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.175544 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.175579 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.175599 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.175699 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:28.175683204 +0000 UTC m=+24.385532270 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.176641 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-cr
c-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.188918 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.202136 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.214629 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.223693 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.233240 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.242330 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.252216 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.257836 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjg6c\" (UniqueName: \"kubernetes.io/projected/6930e7fb-9970-470b-b08c-8560249f8597-kube-api-access-vjg6c\") pod \"node-resolver-cvsg7\" (UID: \"6930e7fb-9970-470b-b08c-8560249f8597\") " pod="openshift-dns/node-resolver-cvsg7" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.262618 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.384071 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jpqwj"] Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.384489 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.386557 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-kbwqg"] Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.386969 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.387860 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.388455 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.389265 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.389483 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.389613 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.389769 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.390130 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.390387 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.390451 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.390654 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.400035 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.412462 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.422598 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.430446 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.439010 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.449210 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.458259 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.466892 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474019 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-netns\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474084 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474131 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-socket-dir-parent\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474159 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-system-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474277 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-bin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474307 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-multus\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474374 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/71ca0541-cbbf-4390-b90e-f068349a51f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbwqg\" (UID: 
\"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474426 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-cnibin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474617 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-k8s-cni-cncf-io\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474649 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-daemon-config\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474705 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-multus-certs\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474722 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-kubelet\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474766 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-conf-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474795 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-cni-binary-copy\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474872 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-hostroot\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.474952 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/71ca0541-cbbf-4390-b90e-f068349a51f6-rootfs\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") 
" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.475002 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/71ca0541-cbbf-4390-b90e-f068349a51f6-proxy-tls\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.475043 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxgpt\" (UniqueName: \"kubernetes.io/projected/71ca0541-cbbf-4390-b90e-f068349a51f6-kube-api-access-wxgpt\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.475205 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-etc-kubernetes\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.475235 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwkrf\" (UniqueName: \"kubernetes.io/projected/61cc9d5b-e515-469c-a472-190ebf3609a3-kube-api-access-xwkrf\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.475404 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-os-release\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.477130 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.485480 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.496971 4868 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d
531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.507074 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.519732 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.521179 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-cvsg7"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.532511 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: W1003 12:50:26.532836 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6930e7fb_9970_470b_b08c_8560249f8597.slice/crio-a984d6eab538f138b3d6ae4589476bf9348279a2481df637756ea8c59bf9f035 WatchSource:0}: Error finding container a984d6eab538f138b3d6ae4589476bf9348279a2481df637756ea8c59bf9f035: Status 404 returned error can't find the container with id a984d6eab538f138b3d6ae4589476bf9348279a2481df637756ea8c59bf9f035
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.543629 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.543823 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.544043 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.544338 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.545408 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.548007 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.548977 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.550432 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.551230 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.552800 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.553593 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.554664 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.555906 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.559619 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.560730 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.562331 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.563270 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.564141 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.564753 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.565523 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.566590 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.567174 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.568275 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.568786 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.569803 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.570864 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.571434 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.572524 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.573118 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.573861 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.574002 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.574896 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.575697 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576216 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576258 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-socket-dir-parent\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576281 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-netns\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576299 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-system-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576315 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-bin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576331 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-multus\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576348 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/71ca0541-cbbf-4390-b90e-f068349a51f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576364 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-cnibin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576386 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-k8s-cni-cncf-io\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576406 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-kubelet\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576400 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576466 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-conf-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576484 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-daemon-config\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576501 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-multus-certs\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576522 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-cni-binary-copy\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576537 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-hostroot\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576554 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/71ca0541-cbbf-4390-b90e-f068349a51f6-rootfs\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576572 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxgpt\" (UniqueName: \"kubernetes.io/projected/71ca0541-cbbf-4390-b90e-f068349a51f6-kube-api-access-wxgpt\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576599 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/71ca0541-cbbf-4390-b90e-f068349a51f6-proxy-tls\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576619 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-etc-kubernetes\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576635 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwkrf\" (UniqueName: \"kubernetes.io/projected/61cc9d5b-e515-469c-a472-190ebf3609a3-kube-api-access-xwkrf\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576653 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-socket-dir-parent\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576661 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-os-release\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576694 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-k8s-cni-cncf-io\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576706 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-netns\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576655 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-system-cni-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576769 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-run-multus-certs\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576778 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/71ca0541-cbbf-4390-b90e-f068349a51f6-rootfs\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576779 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-kubelet\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576795 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-hostroot\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576827 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-etc-kubernetes\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576837 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-cnibin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576951 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-conf-dir\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.576984 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-multus\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.577036 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-os-release\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.577173 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/61cc9d5b-e515-469c-a472-190ebf3609a3-host-var-lib-cni-bin\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.577256 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-multus-daemon-config\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.577623 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/61cc9d5b-e515-469c-a472-190ebf3609a3-cni-binary-copy\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.577347 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/71ca0541-cbbf-4390-b90e-f068349a51f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.578526 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.579236 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.580268 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.581355 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/71ca0541-cbbf-4390-b90e-f068349a51f6-proxy-tls\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.585314 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.593115 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwkrf\" (UniqueName: \"kubernetes.io/projected/61cc9d5b-e515-469c-a472-190ebf3609a3-kube-api-access-xwkrf\") pod \"multus-jpqwj\" (UID: \"61cc9d5b-e515-469c-a472-190ebf3609a3\") " pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.596360 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxgpt\" (UniqueName: \"kubernetes.io/projected/71ca0541-cbbf-4390-b90e-f068349a51f6-kube-api-access-wxgpt\") pod \"machine-config-daemon-kbwqg\" (UID: \"71ca0541-cbbf-4390-b90e-f068349a51f6\") " pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.596500 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.606624 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.617692 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.624162 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.624780 4868 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.624924 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.627217 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.627780 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.628308 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.630583 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.631572 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.632529 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.633343 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.634567 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.635134 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.635843 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.637133 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.638433 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.639016 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.640158 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.640884 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.642273 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.642823 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.644126 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.644679 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.645265 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.646371 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.646851 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.697645 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jpqwj"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.703341 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.706811 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d"}
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.708508 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cvsg7" event={"ID":"6930e7fb-9970-470b-b08c-8560249f8597","Type":"ContainerStarted","Data":"a984d6eab538f138b3d6ae4589476bf9348279a2481df637756ea8c59bf9f035"}
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.708786 4868 scope.go:117] "RemoveContainer" containerID="8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de"
Oct 03 12:50:26 crc kubenswrapper[4868]: E1003 12:50:26.708962 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Oct 03 12:50:26 crc kubenswrapper[4868]: W1003 12:50:26.723411 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71ca0541_cbbf_4390_b90e_f068349a51f6.slice/crio-3f0ccc177900d61739d9a98915e2cf3a2e134492387792140d53361916e1d6c8 WatchSource:0}: Error finding container 3f0ccc177900d61739d9a98915e2cf3a2e134492387792140d53361916e1d6c8: Status 404 returned error can't find the container with id 3f0ccc177900d61739d9a98915e2cf3a2e134492387792140d53361916e1d6c8
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.736174 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-cjm4x"]
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.737813 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fgxcz"]
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.738023 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.738918 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.740448 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741037 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741155 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741357 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741419 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741426 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.741358 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.742135 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.742215 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.755245 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.766873 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.781492 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.792449 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.807549 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.820271 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.832019 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.842237 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.854349 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.865295 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.876010 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879554 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-os-release\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879617 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879699 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879723 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879745 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
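[Annotation] Every "Failed to update status for pod" entry above fails the same way: the API server must consult the mutating webhook pod.network-node-identity.openshift.io, served from this node at 127.0.0.1:9743, and that endpoint is down while its pod is being recreated, so every status patch is rejected with "connection refused". A quick Go probe of that endpoint, a sketch with the address taken directly from the log's error message:

package main

import (
	"fmt"
	"net"
	"time"
)

// Probe the webhook endpoint the API server keeps failing to reach.
func main() {
	conn, err := net.DialTimeout("tcp", "127.0.0.1:9743", 2*time.Second)
	if err != nil {
		// Matches the log: dial tcp 127.0.0.1:9743: connect: connection refused
		fmt.Println("webhook unreachable:", err)
		return
	}
	conn.Close()
	fmt.Println("webhook endpoint is accepting connections")
}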
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879787 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879903 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.879983 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880128 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880249 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880288 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880321 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cnibin\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880348 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z65d4\" (UniqueName: \"kubernetes.io/projected/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-kube-api-access-z65d4\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
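[Annotation] The reconciler_common entries show the kubelet verifying each of ovnkube-node's volumes before mounting; most are hostPath volumes whose unique names follow kubernetes.io/host-path/<pod-UID>-<volume-name>. A sketch of how one such volume pairs a pod-spec entry with a container mount, using the upstream k8s.io/api core/v1 types (the volume name comes from the log; the host path and mount path here are illustrative assumptions, not the operator's actual manifest):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
)

func main() {
	// One of the hostPath volumes the reconciler verifies above;
	// its unique name is kubernetes.io/host-path/<pod-UID>-host-cni-bin.
	// The /var/lib/cni/bin host path is assumed for illustration.
	vol := corev1.Volume{
		Name: "host-cni-bin",
		VolumeSource: corev1.VolumeSource{
			HostPath: &corev1.HostPathVolumeSource{Path: "/var/lib/cni/bin"},
		},
	}
	// Hypothetical in-container mount point for the same volume.
	mount := corev1.VolumeMount{Name: "host-cni-bin", MountPath: "/cni-bin-dir"}
	fmt.Printf("volume %q -> container mount at %q\n", vol.Name, mount.MountPath)
}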
\"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880445 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880485 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880515 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880544 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-binary-copy\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880574 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880601 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880652 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880682 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880710 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880767 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-system-cni-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880838 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880865 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.880888 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2skbw\" (UniqueName: \"kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.883577 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.893526 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.904667 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.915672 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.927882 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.938937 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.947358 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.963654 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.975274 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981807 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981865 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cnibin\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981888 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z65d4\" (UniqueName: \"kubernetes.io/projected/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-kube-api-access-z65d4\") pod 
\"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981914 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981930 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cnibin\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981940 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.981972 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982014 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982036 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982067 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-binary-copy\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982084 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982100 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982123 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982133 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982138 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982170 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982184 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-system-cni-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982171 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982091 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982202 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2skbw\" (UniqueName: \"kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw\") pod 
\"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982219 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982234 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982248 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-os-release\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982263 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982288 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982315 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982352 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982380 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982404 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch\") pod \"ovnkube-node-fgxcz\" (UID: 
\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982415 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982435 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982444 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982446 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982476 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982480 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982477 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982526 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-system-cni-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982537 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 
12:50:26.982556 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-os-release\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982654 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982652 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982647 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982750 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.982800 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.986279 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.987315 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.996125 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-tuning-conf-dir\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.998183 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-binary-copy\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.998451 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: \"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:26 crc kubenswrapper[4868]: I1003 12:50:26.999184 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.001369 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2skbw\" (UniqueName: \"kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.001980 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.002151 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.002414 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib\") pod \"ovnkube-node-fgxcz\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.003028 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z65d4\" (UniqueName: \"kubernetes.io/projected/e2c273fa-527b-44a3-acd1-37e17e1f7d1c-kube-api-access-z65d4\") pod \"multus-additional-cni-plugins-cjm4x\" (UID: 
\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\") " pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.021393 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.044621 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.061614 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.062265 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.294861 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.543725 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:27 crc kubenswrapper[4868]: E1003 12:50:27.544411 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.712345 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"3f0ccc177900d61739d9a98915e2cf3a2e134492387792140d53361916e1d6c8"} Oct 03 12:50:27 crc kubenswrapper[4868]: I1003 12:50:27.712944 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerStarted","Data":"4e352f88276a56a382b719d837f405c2dc29662d7557a2882bf177d7e2e22d28"} Oct 03 12:50:27 crc kubenswrapper[4868]: W1003 12:50:27.905386 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46b5db5d_3104_43ab_9ae7_080ec1f50ca9.slice/crio-afefc4e5b6ee82429e1b712bc5a39a4dcb74af1d209ce8643b62e893dab962d5 WatchSource:0}: Error finding container afefc4e5b6ee82429e1b712bc5a39a4dcb74af1d209ce8643b62e893dab962d5: Status 404 returned error can't find the container with id afefc4e5b6ee82429e1b712bc5a39a4dcb74af1d209ce8643b62e893dab962d5 Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.204813 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.204943 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.204967 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.204984 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.205009 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205098 4868 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:32.205041714 +0000 UTC m=+28.414890780 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205156 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205161 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205175 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205174 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205232 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:32.205218919 +0000 UTC m=+28.415067985 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205195 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205296 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:32.20527055 +0000 UTC m=+28.415119616 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205196 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205346 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:32.205332781 +0000 UTC m=+28.415182017 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205354 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205369 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.205413 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:32.205403083 +0000 UTC m=+28.415252339 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.543794 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.543930 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.543960 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:28 crc kubenswrapper[4868]: E1003 12:50:28.544172 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.717705 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"afefc4e5b6ee82429e1b712bc5a39a4dcb74af1d209ce8643b62e893dab962d5"} Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.719744 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerStarted","Data":"36bb35d1a8b9800ae0f0e548897f5f51c62c328d80cf8af6b5b1de0f4f3ad857"} Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.721512 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665"} Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.886416 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-597z4"] Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.886791 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.888774 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.888813 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.889217 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.889641 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.896129 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.912946 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"moun
tPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v
4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.922403 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.933661 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.942207 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.953379 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.963626 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.970918 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.978069 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.987397 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:28 crc kubenswrapper[4868]: I1003 12:50:28.996658 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.007603 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.011916 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8948782-cb43-4adf-a2a3-c5c22a3f1254-host\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.011990 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqv45\" (UniqueName: \"kubernetes.io/projected/c8948782-cb43-4adf-a2a3-c5c22a3f1254-kube-api-access-mqv45\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.012085 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c8948782-cb43-4adf-a2a3-c5c22a3f1254-serviceca\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.017498 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.027179 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.113497 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqv45\" (UniqueName: \"kubernetes.io/projected/c8948782-cb43-4adf-a2a3-c5c22a3f1254-kube-api-access-mqv45\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.113556 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c8948782-cb43-4adf-a2a3-c5c22a3f1254-serviceca\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.113577 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8948782-cb43-4adf-a2a3-c5c22a3f1254-host\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.113632 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c8948782-cb43-4adf-a2a3-c5c22a3f1254-host\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.114609 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/c8948782-cb43-4adf-a2a3-c5c22a3f1254-serviceca\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.128944 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqv45\" (UniqueName: \"kubernetes.io/projected/c8948782-cb43-4adf-a2a3-c5c22a3f1254-kube-api-access-mqv45\") pod \"node-ca-597z4\" (UID: \"c8948782-cb43-4adf-a2a3-c5c22a3f1254\") " pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.198373 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-597z4" Oct 03 12:50:29 crc kubenswrapper[4868]: W1003 12:50:29.209765 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8948782_cb43_4adf_a2a3_c5c22a3f1254.slice/crio-38c154f9070fa790958804a8932e2bce46923cc3976ccdf592cb27a30fc9b55a WatchSource:0}: Error finding container 38c154f9070fa790958804a8932e2bce46923cc3976ccdf592cb27a30fc9b55a: Status 404 returned error can't find the container with id 38c154f9070fa790958804a8932e2bce46923cc3976ccdf592cb27a30fc9b55a Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.543000 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:29 crc kubenswrapper[4868]: E1003 12:50:29.543991 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.725957 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cvsg7" event={"ID":"6930e7fb-9970-470b-b08c-8560249f8597","Type":"ContainerStarted","Data":"aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7"} Oct 03 12:50:29 crc kubenswrapper[4868]: I1003 12:50:29.727116 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-597z4" event={"ID":"c8948782-cb43-4adf-a2a3-c5c22a3f1254","Type":"ContainerStarted","Data":"38c154f9070fa790958804a8932e2bce46923cc3976ccdf592cb27a30fc9b55a"} Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.036124 4868 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.040156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.040205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.040214 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.040439 4868 kubelet_node_status.go:76] "Attempting to register node" node="crc" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.048754 4868 kubelet_node_status.go:115] "Node was previously registered" node="crc" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.049114 4868 kubelet_node_status.go:79] "Successfully registered node" node="crc" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.050610 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.050651 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.050663 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc 
kubenswrapper[4868]: I1003 12:50:30.050677 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.050688 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: E1003 12:50:30.064501 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.068960 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.069001 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.069012 4868 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.118707 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.118722 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: E1003 12:50:30.130857 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: E1003 12:50:30.130998 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.132959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.132995 4868 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.133005 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.133022 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.133031 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.235312 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.235391 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.235414 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.235451 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.235475 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.339270 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.339351 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.339371 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.339403 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.339425 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.442872 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.442927 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.442964 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.443003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.443028 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.544155 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.544210 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:50:30 crc kubenswrapper[4868]: E1003 12:50:30.544307 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 12:50:30 crc kubenswrapper[4868]: E1003 12:50:30.544517 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.546635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.546666 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.546676 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.546692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.546701 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.649986 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.650034 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.650043 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.650079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.650093 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.732026 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.734345 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerStarted","Data":"4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.737856 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.739530 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.740868 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerStarted","Data":"ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.742403 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.748941 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.752361 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.752409 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.752420 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.752439 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.752449 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.761241 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.790988 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: 
[kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.806985 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"qu
ay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.820402 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.832931 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.844956 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.854895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.854955 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.854984 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.855007 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.855019 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.858887 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.867821 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.877777 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.890910 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.900840 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.912243 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.922400 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.932942 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.939596 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"
hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.954252 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.957471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.957516 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.957530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.957554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.957571 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:30Z","lastTransitionTime":"2025-10-03T12:50:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.970814 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-
10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' 
detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:30 crc kubenswrapper[4868]: I1003 12:50:30.990102 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.003570 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.013173 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.023100 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.032091 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.040644 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.050115 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.059102 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.060200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.060271 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.060286 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.060312 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.060327 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.069945 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\
\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.083780 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.163497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.163547 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.163574 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc 
kubenswrapper[4868]: I1003 12:50:31.163596 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.163609 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.267265 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.267326 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.267345 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.267367 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.267380 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.369586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.369624 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.369632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.369645 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.369653 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.473711 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.473769 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.473782 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.473799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.473814 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.545100 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:31 crc kubenswrapper[4868]: E1003 12:50:31.545473 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.577225 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.577321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.577348 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.577384 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.577414 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.681964 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.682017 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.682033 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.682081 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.682096 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.747613 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" exitCode=0 Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.747700 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.749929 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-597z4" event={"ID":"c8948782-cb43-4adf-a2a3-c5c22a3f1254","Type":"ContainerStarted","Data":"8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.759992 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.779283 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.784174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.784236 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.784254 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.784277 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.784292 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.795045 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.807678 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.820818 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.829680 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.850361 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.864591 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.877471 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.886263 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.886308 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.886325 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.886346 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.886364 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.887881 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.898070 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.907177 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.915446 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.922451 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.931943 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.943027 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.952969 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.962681 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.969752 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988271 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988736 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988745 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988759 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.988769 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:31Z","lastTransitionTime":"2025-10-03T12:50:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:31 crc kubenswrapper[4868]: I1003 12:50:31.998394 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.009635 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.020522 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.029273 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.038642 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.053387 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.062834 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\"
:true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.069168 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.090809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.090850 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.090863 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.090881 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.090894 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.193016 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.193080 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.193092 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.193109 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.193118 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.247742 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.247865 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.247893 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.247910 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.247953 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.247921561 +0000 UTC m=+36.457770627 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.248002 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248016 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248033 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248043 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248046 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248086 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248116 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.248099546 +0000 UTC m=+36.457948612 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248132 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.248126526 +0000 UTC m=+36.457975592 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248164 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248177 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248184 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.248158847 +0000 UTC m=+36.458007973 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248189 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.248219 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.248211858 +0000 UTC m=+36.458060914 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.295755 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.295794 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.295805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.295820 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.295830 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.398310 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.398353 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.398363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.398379 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.398389 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.500815 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.500852 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.500861 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.500874 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.500883 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.543254 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.543319 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.543511 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:32 crc kubenswrapper[4868]: E1003 12:50:32.543675 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.603357 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.603435 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.603459 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.603493 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.603512 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.706744 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.706805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.706815 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.706833 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.706850 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.766577 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.786494 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.799265 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.809954 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.810022 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.810043 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.810089 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.810103 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.813298 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.825222 4868 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.839587 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.853806 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.867019 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.876151 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.885034 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.898095 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913627 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913694 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913745 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913764 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:32Z","lastTransitionTime":"2025-10-03T12:50:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.913909 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.926812 
4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":
\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.937236 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Ru
nning\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.948124 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.961520 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.972713 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.982639 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:32 crc kubenswrapper[4868]: I1003 12:50:32.995483 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.006652 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.016357 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.016392 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.016402 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.016419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.016430 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.019855 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.033372 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.044961 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.054241 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for 
pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.069925 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-d
ev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\
\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.082413 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.095109 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.105355 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.120027 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.120102 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.120112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.120136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.120147 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.223179 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.223230 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.223243 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.223261 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.223274 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.326627 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.326668 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.326678 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.326697 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.326707 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.429613 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.429679 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.429700 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.429724 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.429743 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.532816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.532933 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.532968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.533010 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.533040 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.544031 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:33 crc kubenswrapper[4868]: E1003 12:50:33.544234 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.636308 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.636427 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.636449 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.636480 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.636501 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.739378 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.739418 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.739427 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.739441 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.739452 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.842605 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.842693 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.842722 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.842755 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.842778 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.945377 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.945580 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.945691 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.945816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:33 crc kubenswrapper[4868]: I1003 12:50:33.945902 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:33Z","lastTransitionTime":"2025-10-03T12:50:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.048686 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.049131 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.049140 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.049157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.049168 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.156392 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.156460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.156479 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.156503 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.156519 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.258642 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.258718 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.258728 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.258742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.258750 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.361520 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.361580 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.361589 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.361604 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.361613 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.463585 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.463624 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.463632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.463650 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.463660 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.543963 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.544046 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:34 crc kubenswrapper[4868]: E1003 12:50:34.544138 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:34 crc kubenswrapper[4868]: E1003 12:50:34.544197 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.558512 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.565869 4868 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.565940 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.565956 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.565974 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.565985 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.571368 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.588110 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.605104 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\
"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.620524 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",
\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.638110 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.658661 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.667941 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.667991 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.668004 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.668028 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.668041 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.673226 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.704724 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.723521 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.751912 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.762821 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.764572 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b" exitCode=0 Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.764642 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.764633 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.767545 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.770160 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.770194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.770205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.770219 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.770228 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.776322 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: 
I1003 12:50:34.786021 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.796953 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.810334 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.823157 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.835396 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.848947 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc 
kubenswrapper[4868]: I1003 12:50:34.861145 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.871758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.871787 4868 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.871815 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.871829 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.871839 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.872882 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.884622 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.901167 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.913319 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.924084 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.935956 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.949539 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.962901 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.973847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.973899 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.973909 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.973926 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:34 crc kubenswrapper[4868]: I1003 12:50:34.973939 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:34Z","lastTransitionTime":"2025-10-03T12:50:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.076626 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.076676 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.076687 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.076715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.076725 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.178581 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.178642 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.178653 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.178669 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.178679 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.281132 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.281192 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.281209 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.281233 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.281252 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.384473 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.384563 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.384584 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.384614 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.384635 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.487401 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.487437 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.487447 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.487461 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.487475 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.543326 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:35 crc kubenswrapper[4868]: E1003 12:50:35.543457 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.590293 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.590332 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.590341 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.590357 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.590372 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.692824 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.692867 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.692878 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.692896 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.692907 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.794817 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.794863 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.794873 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.794886 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.794894 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.897157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.897222 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.897240 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.897263 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:35 crc kubenswrapper[4868]: I1003 12:50:35.897278 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:35Z","lastTransitionTime":"2025-10-03T12:50:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.003019 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.003100 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.003111 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.003131 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.003145 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.105776 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.105838 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.105858 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.105889 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.105910 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.208686 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.208736 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.208747 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.208764 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.208776 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.311848 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.311889 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.311902 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.311920 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.311930 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.414223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.414254 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.414262 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.414276 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.414286 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.517693 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.517769 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.517782 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.517799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.517810 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.543315 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.543508 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:36 crc kubenswrapper[4868]: E1003 12:50:36.543550 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:36 crc kubenswrapper[4868]: E1003 12:50:36.543594 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.620571 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.620647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.620667 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.620702 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.620724 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.723255 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.723300 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.723315 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.723330 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.723346 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.776126 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.777880 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerStarted","Data":"d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.791257 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.802681 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.812293 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.825808 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.825843 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.825853 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.825866 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.825875 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.826143 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.838920 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.851651 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc 
kubenswrapper[4868]: I1003 12:50:36.863210 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.872828 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.882102 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.897434 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.909370 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.921751 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.928203 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.928223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.928231 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.928245 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.928253 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:36Z","lastTransitionTime":"2025-10-03T12:50:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.934347 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:36 crc kubenswrapper[4868]: I1003 12:50:36.948096 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.030433 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.030486 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.030495 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.030514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.030524 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.133326 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.133374 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.133400 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.133423 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.133438 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.236087 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.236120 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.236129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.236145 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.236154 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.339216 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.339343 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.339358 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.339378 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.339390 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.441864 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.441950 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.441966 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.441990 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.442003 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.543198 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:37 crc kubenswrapper[4868]: E1003 12:50:37.543392 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.544786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.544819 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.544831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.544848 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.544860 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.647614 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.647673 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.647687 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.647702 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.647710 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.749955 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.750017 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.750034 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.750084 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.750097 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.792248 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.803960 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.815830 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.825704 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.837091 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.848384 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.852091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.852118 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.852127 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.852141 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.852151 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.862938 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.878615 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.890090 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.904519 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.923847 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host
/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\
"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.941111 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.953970 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.954344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.954379 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.954388 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.954404 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.954414 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:37Z","lastTransitionTime":"2025-10-03T12:50:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:37 crc kubenswrapper[4868]: I1003 12:50:37.971279 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:37Z 
is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.056500 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.056542 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.056552 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.056569 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.056588 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.159410 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.159457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.159466 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.159483 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.159492 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.261277 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.261315 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.261323 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.261336 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.261345 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.364912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.364959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.364970 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.364987 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.364997 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.467085 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.467122 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.467133 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.467148 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.467162 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.543804 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.543858 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:38 crc kubenswrapper[4868]: E1003 12:50:38.543962 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:38 crc kubenswrapper[4868]: E1003 12:50:38.544133 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.570170 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.570213 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.570229 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.570246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.570260 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.672085 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.672126 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.672136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.672151 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.672161 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.715035 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"] Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.715501 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.717358 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.718046 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.730676 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controlle
r-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.741188 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.758981 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.771299 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:3
8Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.774789 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.774823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.774832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.774846 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.774855 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.785317 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.785592 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.803163 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.814348 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.814520 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ld996\" (UniqueName: \"kubernetes.io/projected/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-kube-api-access-ld996\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.814620 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.814673 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.814703 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.825563 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.840112 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.850374 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.859181 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.870075 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.876514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.876555 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.876567 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:38 crc 
kubenswrapper[4868]: I1003 12:50:38.876584 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.876596 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.880994 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"nam
e\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.892977 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.904575 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 
12:50:38.916128 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ld996\" (UniqueName: \"kubernetes.io/projected/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-kube-api-access-ld996\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.916185 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.916208 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.916225 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.916974 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.917011 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.921385 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.932229 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ld996\" (UniqueName: \"kubernetes.io/projected/7ce330d9-09c6-4bf3-b485-ea0a34b8f32d-kube-api-access-ld996\") pod \"ovnkube-control-plane-749d76644c-wcttt\" (UID: \"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.978684 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.978749 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.978760 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.978774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:38 crc kubenswrapper[4868]: I1003 12:50:38.978783 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:38Z","lastTransitionTime":"2025-10-03T12:50:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.030648 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt"
Oct 03 12:50:39 crc kubenswrapper[4868]: W1003 12:50:39.041749 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ce330d9_09c6_4bf3_b485_ea0a34b8f32d.slice/crio-a0c2855f3a282dc442fd8497dca4a7b73e22af490922a9fea38c33ff7ba7e471 WatchSource:0}: Error finding container a0c2855f3a282dc442fd8497dca4a7b73e22af490922a9fea38c33ff7ba7e471: Status 404 returned error can't find the container with id a0c2855f3a282dc442fd8497dca4a7b73e22af490922a9fea38c33ff7ba7e471
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.082007 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.082195 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.082213 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.082236 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.082248 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.184389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.184436 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.184450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.184468 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.184480 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.286445 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.286482 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.286494 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.286509 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.286520 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.389739 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.390089 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.390102 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.390120 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.390133 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.492943 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.492980 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.492988 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.493009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.493019 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.543517 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:50:39 crc kubenswrapper[4868]: E1003 12:50:39.543669 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.544187 4868 scope.go:117] "RemoveContainer" containerID="8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.595347 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.596017 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.596090 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.596114 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.596138 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.699136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.699169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.699178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.699191 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.699202 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.790385 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2" exitCode=0
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.790483 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.792392 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" event={"ID":"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d","Type":"ContainerStarted","Data":"5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.792435 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" event={"ID":"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d","Type":"ContainerStarted","Data":"a0c2855f3a282dc442fd8497dca4a7b73e22af490922a9fea38c33ff7ba7e471"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.795489 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"}
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.798289 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-nwqvb"]
Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.798787 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:50:39 crc kubenswrapper[4868]: E1003 12:50:39.798852 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.805458 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.805498 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.805510 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.805527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.805540 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.808779 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.822672 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.823611 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.823683 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rctt\" (UniqueName: \"kubernetes.io/projected/4fc2d690-5dcc-4f98-8607-0b3909f44c23-kube-api-access-4rctt\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.833273 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.843606 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.853862 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.865350 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.882012 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.897774 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.915721 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.917389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.917440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.917455 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.917471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.917482 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:39Z","lastTransitionTime":"2025-10-03T12:50:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.924636 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rctt\" (UniqueName: \"kubernetes.io/projected/4fc2d690-5dcc-4f98-8607-0b3909f44c23-kube-api-access-4rctt\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.924770 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:39 crc kubenswrapper[4868]: E1003 12:50:39.924888 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:39 crc kubenswrapper[4868]: E1003 12:50:39.924974 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:40.42495341 +0000 UTC m=+36.634802486 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.934885 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.959236 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rctt\" (UniqueName: \"kubernetes.io/projected/4fc2d690-5dcc-4f98-8607-0b3909f44c23-kube-api-access-4rctt\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.970749 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:39 crc kubenswrapper[4868]: I1003 12:50:39.984110 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.001329 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.014170 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.020651 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.020696 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.020707 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.020726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.020738 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.026871 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.040035 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.056274 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.070010 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.082380 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.095521 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.112574 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123348 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123393 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123404 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123423 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123434 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.123979 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.135525 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.149046 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.159856 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.175183 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.190888 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin
\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.204361 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.216857 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.225795 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.225834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.225847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.225866 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.225878 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.242443 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z 
is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.260165 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.326980 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.327139 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327182 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:50:56.327152677 +0000 UTC m=+52.537001743 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.327245 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.327274 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327278 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.327292 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327303 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: 
E1003 12:50:40.327317 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327373 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:56.327353002 +0000 UTC m=+52.537202138 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327413 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327426 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327437 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327468 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:56.327460505 +0000 UTC m=+52.537309571 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327507 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327528 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:56.327521256 +0000 UTC m=+52.537370322 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327551 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.327570 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:56.327565327 +0000 UTC m=+52.537414383 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.328882 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.328923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.328934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.328952 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.328962 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.411957 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.411992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.412003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.412019 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.412030 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.428122 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.428274 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.428332 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:41.428312039 +0000 UTC m=+37.638161105 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.428857 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.433981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.434013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.434024 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.434043 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.434074 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.446905 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z"
Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.452789 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.452831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.452843 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.452860 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.452870 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.467662 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z"
Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.471494 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.471525 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.471533 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.471548 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.471558 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.483181 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z"
Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.486319 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.486348 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.486358 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.486372 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.486383 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.498592 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{...}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.498699 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.500068 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.500101 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.500112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.500128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.500140 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.543129 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.543196 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.543266 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:40 crc kubenswrapper[4868]: E1003 12:50:40.543432 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.602972 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.603008 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.603016 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.603031 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.603040 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.706384 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.706437 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.706450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.706470 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.706486 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.801795 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.803999 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899" exitCode=0 Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.804035 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.806148 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.808706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.808795 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.808931 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.809013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.809123 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.809452 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.809704 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.811244 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" event={"ID":"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d","Type":"ContainerStarted","Data":"d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.818807 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/stati
c-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.830970 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.854552 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.874357 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:3
8Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.888852 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshif
t-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.909189 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.911587 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.911646 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.911657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.911678 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.911730 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:40Z","lastTransitionTime":"2025-10-03T12:50:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.924731 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.936518 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.949132 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.961332 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.974075 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.984300 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:40 crc kubenswrapper[4868]: I1003 12:50:40.995382 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:40Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.008555 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.019715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.019771 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.019781 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.019802 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.019815 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.022107 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.035531 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mo
untPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.049110 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/se
rviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.067035 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03
cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.080367 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.099564 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.113119 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.121871 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.121949 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.121959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.121973 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.121984 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.128615 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.142162 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.155889 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.167601 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.181093 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.192575 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.204665 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.220081 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.224252 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.224280 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 
crc kubenswrapper[4868]: I1003 12:50:41.224288 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.224302 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.224312 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.232332 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.245209 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.259187 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\
":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary
-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.326156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.326202 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.326215 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.326232 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.326244 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.428453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.428505 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.428520 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.428536 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.428548 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.445325 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:41 crc kubenswrapper[4868]: E1003 12:50:41.445642 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:41 crc kubenswrapper[4868]: E1003 12:50:41.445838 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:43.44580055 +0000 UTC m=+39.655649656 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.531279 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.531329 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.531343 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.531363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.531377 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.543654 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.543704 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:41 crc kubenswrapper[4868]: E1003 12:50:41.543796 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:41 crc kubenswrapper[4868]: E1003 12:50:41.543955 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.633290 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.633339 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.633349 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.633366 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.633377 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.735912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.735963 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.735974 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.735991 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.736002 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.818545 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.820861 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d" exitCode=0 Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.820936 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.835176 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\
\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.838916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.838979 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.838992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.839014 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.839026 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.855751 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.878489 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.890597 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/opensh
ift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.915294 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.930676 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.941504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.941541 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.941553 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.941568 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.941579 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:41Z","lastTransitionTime":"2025-10-03T12:50:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.944982 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.958628 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.973600 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:41 crc kubenswrapper[4868]: I1003 12:50:41.991976 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:41Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.005365 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.020893 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.036849 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.044301 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.044344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.044354 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.044371 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.044383 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.051026 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 
12:50:42.065248 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.081874 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.146971 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.147012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.147031 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.147068 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.147082 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.249746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.249783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.249792 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.249808 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.249817 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.353103 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.353158 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.353169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.353193 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.353212 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.455727 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.455861 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.455884 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.455915 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.455937 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.543876 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.543955 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:42 crc kubenswrapper[4868]: E1003 12:50:42.544235 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:42 crc kubenswrapper[4868]: E1003 12:50:42.544359 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.558432 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.558479 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.558494 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.558512 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.558527 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.661607 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.661681 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.661694 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.661710 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.661734 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.763900 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.763933 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.763942 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.763957 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.763967 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.827575 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerStarted","Data":"b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.840267 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.853380 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.866036 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.866099 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.866112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.866134 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.866147 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.871308 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.888041 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77
3257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.901572 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.914356 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.941489 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.956512 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.969486 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.969535 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.969548 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.969570 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.969584 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:42Z","lastTransitionTime":"2025-10-03T12:50:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.973392 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:42 crc kubenswrapper[4868]: I1003 12:50:42.990906 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:42Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.007779 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.020421 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.034966 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.051177 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.065092 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.072096 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.072140 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.072160 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.072182 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.072196 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.080704 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:43Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.175353 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.175766 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.175780 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.175803 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.175817 4868 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.278809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.278849 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.278860 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.278876 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.278889 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.381506 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.381580 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.381601 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.381631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.381650 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.464928 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:43 crc kubenswrapper[4868]: E1003 12:50:43.464921 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:43 crc kubenswrapper[4868]: E1003 12:50:43.465125 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:47.465098114 +0000 UTC m=+43.674947180 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.483758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.483797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.483809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.483826 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.483837 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.543284 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.543295 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:43 crc kubenswrapper[4868]: E1003 12:50:43.543573 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:43 crc kubenswrapper[4868]: E1003 12:50:43.543438 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.586087 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.586129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.586142 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.586158 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.586168 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.687968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.688041 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.688118 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.688152 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.688173 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.789958 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.790018 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.790042 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.790094 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.790114 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.833774 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.893080 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.893118 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.893138 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.893166 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.893182 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.995419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.995453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.995464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.995481 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:43 crc kubenswrapper[4868]: I1003 12:50:43.995491 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:43Z","lastTransitionTime":"2025-10-03T12:50:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.098389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.098438 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.098454 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.098471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.098483 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.200794 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.200842 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.200912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.200928 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.200938 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.303152 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.303234 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.303247 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.303263 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.303273 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.406708 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.406819 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.406838 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.406868 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.406889 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.510895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.510951 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.510965 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.510984 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.510999 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.543585 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.543685 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:44 crc kubenswrapper[4868]: E1003 12:50:44.543742 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:44 crc kubenswrapper[4868]: E1003 12:50:44.543849 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.569231 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.584502 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.603960 4868 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.613637 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.613709 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.613731 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.613766 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.613787 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.623563 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.640488 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.656246 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.683681 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.696713 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.712438 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-r
esources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.716326 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.716363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.716375 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.716394 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 
12:50:44.716406 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.731341 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.749541 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.761710 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.778634 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.791563 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.807016 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.818857 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.819384 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.819563 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.819653 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.819774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.819847 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.840095 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959" exitCode=0 Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.840142 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.854706 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.873451 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.886365 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.902787 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.923174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.923211 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.923223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.923244 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.923260 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:44Z","lastTransitionTime":"2025-10-03T12:50:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.926548 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.941192 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.955883 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:44 crc kubenswrapper[4868]: I1003 12:50:44.994325 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:44Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.018609 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.026405 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.026460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.026475 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.026502 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.026514 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.040424 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.052200 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.063228 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.074047 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.094180 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.106713 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.124354 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b827
99488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.129345 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.129390 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.129403 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.129422 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.129433 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.231880 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.231929 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.231942 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.231964 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.231979 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.334485 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.334528 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.334539 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.334555 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.334566 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.437313 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.437563 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.437572 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.437586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.437596 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.539713 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.539758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.539770 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.539784 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.539792 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.543350 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.543358 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:45 crc kubenswrapper[4868]: E1003 12:50:45.543514 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:45 crc kubenswrapper[4868]: E1003 12:50:45.543625 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.642585 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.642677 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.642721 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.642742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.642755 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.745728 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.745769 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.745779 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.745799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.745810 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.847890 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.847969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.847990 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.848019 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.848039 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.849599 4868 generic.go:334] "Generic (PLEG): container finished" podID="e2c273fa-527b-44a3-acd1-37e17e1f7d1c" containerID="3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0" exitCode=0 Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.849696 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerDied","Data":"3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.855493 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.866185 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.881390 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.898547 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.915813 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c28
64dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.931895 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubern
etes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.947536 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.951207 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.951248 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.951258 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.951279 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.951301 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:45Z","lastTransitionTime":"2025-10-03T12:50:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.970355 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:45 crc kubenswrapper[4868]: I1003 12:50:45.983549 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:46 crc kubenswrapper[4868]: I1003 12:50:45.999680 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:45Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.013948 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.028668 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.040769 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.053785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.053836 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.053851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.053874 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.053886 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.056845 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.070585 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.082497 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.095513 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.157421 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.157480 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.157492 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.157515 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.157527 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.260547 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.260948 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.260960 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.260978 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.260990 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.364433 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.364497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.364509 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.364534 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.364546 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.467152 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.467194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.467206 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.467224 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.467236 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.543170 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.543198 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:46 crc kubenswrapper[4868]: E1003 12:50:46.543338 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:46 crc kubenswrapper[4868]: E1003 12:50:46.543452 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.569350 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.569386 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.569395 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.569409 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.569418 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.672510 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.672561 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.672572 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.672587 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.672598 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.774916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.774983 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.774992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.775009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.775021 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.861916 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" event={"ID":"e2c273fa-527b-44a3-acd1-37e17e1f7d1c","Type":"ContainerStarted","Data":"0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.862309 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.862453 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.877499 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.878783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.878824 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.878834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.878849 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.878858 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.892132 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.907545 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.914724 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.915706 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.920594 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.933768 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.947393 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.959823 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.972402 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.980681 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.980735 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.980748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.980765 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.980777 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:46Z","lastTransitionTime":"2025-10-03T12:50:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:46 crc kubenswrapper[4868]: I1003 12:50:46.988653 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:46Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.003321 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.015783 4868 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\
\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.031316 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",
\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state
\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.043782 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.055666 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.075643 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.083817 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.083893 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.083906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.083937 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.083948 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.089039 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.101132 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.112428 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.126133 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.144416 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.157881 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.168571 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.186990 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.187038 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.187047 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.187078 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.187087 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.193884 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/r
un/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.207730 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.221973 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.235686 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.247866 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.260816 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.276299 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.289803 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.290183 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.290288 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.290392 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.290675 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.290888 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.302005 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.312506 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:47Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.395298 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.395353 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.395366 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.395389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.395403 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.498923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.498969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.498981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.499000 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.499016 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.510676 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:47 crc kubenswrapper[4868]: E1003 12:50:47.510829 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:47 crc kubenswrapper[4868]: E1003 12:50:47.510878 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:50:55.510862099 +0000 UTC m=+51.720711165 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.544206 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.544275 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:47 crc kubenswrapper[4868]: E1003 12:50:47.544394 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:47 crc kubenswrapper[4868]: E1003 12:50:47.544524 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.603513 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.603572 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.603586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.603610 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.603629 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.707723 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.708772 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.708852 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.708927 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.709066 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.812333 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.812586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.812706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.812807 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.812908 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.865311 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.915517 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.915572 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.915587 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.915605 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:47 crc kubenswrapper[4868]: I1003 12:50:47.915619 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:47Z","lastTransitionTime":"2025-10-03T12:50:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.017683 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.017729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.017739 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.017753 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.017764 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.119865 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.119898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.119906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.119921 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.119931 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.221847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.221904 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.221916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.221938 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.221950 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.324709 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.324748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.324757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.324771 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.324779 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.427456 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.427498 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.427506 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.427520 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.427530 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.529865 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.529914 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.529928 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.529945 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.529955 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.543417 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.543482 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:48 crc kubenswrapper[4868]: E1003 12:50:48.543706 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:48 crc kubenswrapper[4868]: E1003 12:50:48.543813 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.633157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.633239 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.633258 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.633290 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.633312 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.736124 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.736169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.736178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.736196 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.736208 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.838622 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.838678 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.838692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.838710 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.838724 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.868810 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.942260 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.942309 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.942323 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.942343 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:48 crc kubenswrapper[4868]: I1003 12:50:48.942359 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:48Z","lastTransitionTime":"2025-10-03T12:50:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.046215 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.046269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.046280 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.046298 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.046310 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.148140 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.148170 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.148178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.148191 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.148200 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.250396 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.250428 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.250438 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.250452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.250461 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.353496 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.353552 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.353561 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.353579 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.353589 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.456550 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.456592 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.456604 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.456625 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.456643 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.543374 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.543468 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:49 crc kubenswrapper[4868]: E1003 12:50:49.543595 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:49 crc kubenswrapper[4868]: E1003 12:50:49.543735 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.559294 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.559334 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.559347 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.559363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.559374 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.661600 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.661953 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.662044 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.662147 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.662229 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.765391 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.766273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.766381 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.766484 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.766574 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.871205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.871273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.871286 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.871305 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.871320 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.875073 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/0.log" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.877593 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984" exitCode=1 Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.877640 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.878419 4868 scope.go:117] "RemoveContainer" containerID="6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.891444 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.904454 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.921249 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.938150 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.951945 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.962431 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974298 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974336 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974347 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974374 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:49Z","lastTransitionTime":"2025-10-03T12:50:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.974616 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:49 crc kubenswrapper[4868]: I1003 12:50:49.994277 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:49Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.008694 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.028659 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.041629 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.055278 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.074532 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba7
06d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:49Z\\\",\\\"message\\\":\\\"49.353108 6247 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:50:49.353134 6247 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:49.353206 6247 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:50:49.353288 6247 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.353363 6247 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.353710 6247 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.354144 6247 factory.go:656] Stopping watch factory\\\\nI1003 12:50:49.354665 6247 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.354689 6247 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003da
d2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.076942 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.076969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.076981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.077003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.077014 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.084118 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.099074 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.111390 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.179608 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.179657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.179670 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.179700 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.179713 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.284147 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.284189 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.284205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.284227 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.284243 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.386382 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.386412 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.386419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.386433 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.386443 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.489097 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.489187 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.489201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.489226 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.489240 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.543788 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.543846 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.543979 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.544270 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.591484 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.591554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.591566 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.591585 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.591601 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.693532 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.693566 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.693574 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.693588 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.693600 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.796822 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.796866 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.796877 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.796894 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.796903 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.884113 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/0.log" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.886944 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.887207 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.899666 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.899726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.899740 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.899766 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.899783 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901306 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901323 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901337 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901349 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.901839 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name
\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.914404 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.917472 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-
10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.919117 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.919164 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.919174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.919194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.919205 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.936451 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.940940 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.940992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.941007 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.941032 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.941066 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.941757 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b19
6ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:49Z\\\",\\\"message\\\":\\\"49.353108 6247 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:50:49.353134 6247 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:49.353206 6247 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:50:49.353288 6247 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.353363 6247 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.353710 6247 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.354144 6247 factory.go:656] Stopping watch factory\\\\nI1003 12:50:49.354665 6247 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.354689 6247 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.955042 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.959768 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.963786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.963823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.963833 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.963854 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.963867 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.971647 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.977993 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.983074 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.983117 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.983128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.983147 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.983160 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:50Z","lastTransitionTime":"2025-10-03T12:50:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:50 crc kubenswrapper[4868]: I1003 12:50:50.989456 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.995300 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"8
5afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:50Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:50 crc kubenswrapper[4868]: E1003 12:50:50.995786 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.002126 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.002172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.002183 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.002201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.002214 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.005803 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.021392 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.037276 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.052839 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.063719 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.074957 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.089436 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.100677 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.104107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.104139 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.104151 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.104168 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.104180 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.116165 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.133168 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.141947 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.149710 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.157415 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.170315 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.180220 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.190165 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.201283 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.206330 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.206377 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.206389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.206407 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.206419 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.212433 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 
12:50:51.222840 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.224356 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.239289 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"
name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eea
de0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50
:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.251265 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95
\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.261637 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.279226 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:49Z\\\",\\\"message\\\":\\\"49.353108 6247 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:50:49.353134 6247 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:49.353206 6247 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:50:49.353288 6247 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.353363 6247 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.353710 6247 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.354144 6247 factory.go:656] Stopping watch factory\\\\nI1003 12:50:49.354665 6247 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.354689 6247 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.291464 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.303429 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.308948 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.308985 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.308996 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.309013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.309024 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.315085 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.327524 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.336918 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.412127 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.412174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.412189 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.412207 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.412218 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.515433 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.515489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.515502 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.515523 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.515539 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.543325 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:51 crc kubenswrapper[4868]: E1003 12:50:51.543546 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.543352 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:51 crc kubenswrapper[4868]: E1003 12:50:51.543766 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.618987 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.619045 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.619095 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.619129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.619147 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.722111 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.722149 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.722168 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.722187 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.722199 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.824519 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.824568 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.824579 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.824601 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.824618 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.893574 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/1.log" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.894501 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/0.log" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.898309 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5" exitCode=1 Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.898497 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.898569 4868 scope.go:117] "RemoveContainer" containerID="6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.899674 4868 scope.go:117] "RemoveContainer" containerID="e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5" Oct 03 12:50:51 crc kubenswrapper[4868]: E1003 12:50:51.899859 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.922382 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.928333 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.928375 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.928386 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.928457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.928472 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:51Z","lastTransitionTime":"2025-10-03T12:50:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.935577 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.960551 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6bc595018b25cd5950632412e2bc3855fbb9ad91eb7e821485b5ef0bb562a984\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:49Z\\\",\\\"message\\\":\\\"49.353108 6247 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:50:49.353134 6247 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:49.353206 6247 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:50:49.353288 6247 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.353363 6247 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.353710 6247 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI1003 12:50:49.354144 6247 factory.go:656] Stopping watch factory\\\\nI1003 12:50:49.354665 6247 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:49.354689 6247 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.975374 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:51 crc kubenswrapper[4868]: I1003 12:50:51.991403 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\
":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:51Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.007343 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.019300 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.029231 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.030600 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.030634 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.030642 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.030657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.030667 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.041301 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.052374 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.065474 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.076447 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.088263 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.104485 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.116928 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.131315 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.132665 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.132701 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.132713 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.132729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.132739 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.148113 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerSta
tuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:
50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-c
ni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.236101 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.236145 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.236156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.236174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.236186 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.339638 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.339688 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.339697 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.339716 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.339726 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.441619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.441654 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.441662 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.441678 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.441882 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.543206 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.543213 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:52 crc kubenswrapper[4868]: E1003 12:50:52.543342 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:52 crc kubenswrapper[4868]: E1003 12:50:52.543432 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.544228 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.544273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.544286 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.544299 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.544310 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.646318 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.646355 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.646363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.646398 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.646408 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.749777 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.749832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.749841 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.749863 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.749874 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.852244 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.852319 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.852347 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.852365 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.852374 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.904630 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/1.log" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.908018 4868 scope.go:117] "RemoveContainer" containerID="e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5" Oct 03 12:50:52 crc kubenswrapper[4868]: E1003 12:50:52.908217 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.921676 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.934496 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955480 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955538 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955640 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955650 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.955673 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:52Z","lastTransitionTime":"2025-10-03T12:50:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.969152 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:52 crc kubenswrapper[4868]: I1003 12:50:52.984805 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.000301 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:52Z is after 2025-08-24T17:21:41Z" Oct 03 
12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.013963 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.031510 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.049259 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 
2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.059147 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.059199 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.059215 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.059239 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.059254 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.069548 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-c
ontroller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.084684 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.108788 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.122193 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.140354 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.159242 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.161529 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.161597 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.161610 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.161630 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.161647 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.177343 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.191279 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:53Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.264714 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.264748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.264756 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.264774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.264784 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.367120 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.367164 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.367175 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.367191 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.367202 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.469880 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.469928 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.469938 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.469955 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.469967 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.543656 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.543675 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:50:53 crc kubenswrapper[4868]: E1003 12:50:53.543933 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
Oct 03 12:50:53 crc kubenswrapper[4868]: E1003 12:50:53.543791 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.572473 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.572554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.572568 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.572586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.572597 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.675365 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.675422 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.675437 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.675458 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.675472 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.778253 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.778298 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.778330 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.778345 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.778354 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.880900 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.880976 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.880999 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.881028 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.881098 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.983966 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.984011 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.984018 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.984034 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:53 crc kubenswrapper[4868]: I1003 12:50:53.984048 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:53Z","lastTransitionTime":"2025-10-03T12:50:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.055882 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.070641 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc 
kubenswrapper[4868]: I1003 12:50:54.086352 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.086395 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.086404 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.086423 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.086433 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.088338 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.102166 4868 status_manager.go:875] 
"Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.117163 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a
642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.127328 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:
38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.140012 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92ed
af5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.150984 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.172934 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.185243 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.188094 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.188115 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.188123 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.188136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.188145 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.202322 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.218501 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.232335 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.242878 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.257187 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.271508 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.285083 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.290101 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.290139 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.290148 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.290162 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.290173 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.296532 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.392786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.392818 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.392827 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.392841 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.392849 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.496246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.496291 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.496300 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.496320 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.496329 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.543473 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.543551 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:54 crc kubenswrapper[4868]: E1003 12:50:54.543637 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:54 crc kubenswrapper[4868]: E1003 12:50:54.543748 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.558567 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.572087 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.582500 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers 
with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.598841 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.598879 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.598891 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.598930 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.598943 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.599079 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.613718 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.624735 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.639157 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.652288 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.665802 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.680657 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.695211 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a
4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\
"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.701112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.701154 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.701165 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.701180 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.701189 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.710836 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.722459 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.738978 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.772297 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.786841 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.798407 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:50:54Z is after 2025-08-24T17:21:41Z" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.802746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.802777 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.802786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.802802 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.802811 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.905570 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.905620 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.905632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.905647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:54 crc kubenswrapper[4868]: I1003 12:50:54.905659 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:54Z","lastTransitionTime":"2025-10-03T12:50:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.007797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.007838 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.007846 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.007862 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.007872 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.110277 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.110314 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.110325 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.110358 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.110369 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.213316 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.213362 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.213371 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.213387 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.213396 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.317228 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.317280 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.317291 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.317309 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.317327 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.419768 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.419822 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.419834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.419851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.419863 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.522166 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.522222 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.522232 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.522246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.522257 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.543656 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.543655 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:55 crc kubenswrapper[4868]: E1003 12:50:55.543842 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:55 crc kubenswrapper[4868]: E1003 12:50:55.544105 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.599965 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:55 crc kubenswrapper[4868]: E1003 12:50:55.600257 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:55 crc kubenswrapper[4868]: E1003 12:50:55.600403 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:11.60037906 +0000 UTC m=+67.810228126 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.625457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.625526 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.625546 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.625575 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.625597 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.729235 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.729302 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.729318 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.729341 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.729364 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.832445 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.832506 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.832514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.832530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.832543 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.935351 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.935429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.935441 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.935464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:55 crc kubenswrapper[4868]: I1003 12:50:55.935482 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:55Z","lastTransitionTime":"2025-10-03T12:50:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.038629 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.038686 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.038707 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.038726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.038739 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.141126 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.141168 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.141178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.141211 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.141222 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.244097 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.244135 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.244144 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.244163 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.244174 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.348023 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.348136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.348158 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.348183 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.348204 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.409130 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.409275 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409347 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:51:28.409316914 +0000 UTC m=+84.619165980 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.409490 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409593 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.409603 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409641 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:28.409630462 +0000 UTC m=+84.619479608 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.409663 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409703 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409743 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409767 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409781 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409795 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:28.409772456 +0000 UTC m=+84.619621522 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409814 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:28.409805356 +0000 UTC m=+84.619654522 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409842 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409854 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409874 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.409908 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:28.409899279 +0000 UTC m=+84.619748475 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.450849 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.450886 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.450894 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.450908 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.450917 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.543980 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.544073 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.544161 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:56 crc kubenswrapper[4868]: E1003 12:50:56.544274 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.553663 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.553698 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.553707 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.553720 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.553731 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.656573 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.656609 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.656618 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.656632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.656641 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.759144 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.759234 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.759251 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.759269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.759280 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.863100 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.863169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.863181 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.863199 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.863210 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.965843 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.965888 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.965896 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.965910 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:56 crc kubenswrapper[4868]: I1003 12:50:56.965919 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:56Z","lastTransitionTime":"2025-10-03T12:50:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.068381 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.068422 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.068433 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.068450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.068461 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.170703 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.170762 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.170775 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.170797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.170810 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.273418 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.273531 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.273552 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.273581 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.273599 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.375956 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.376000 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.376012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.376030 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.376042 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.478684 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.478719 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.478730 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.478748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.478759 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.543756 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.543762 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:57 crc kubenswrapper[4868]: E1003 12:50:57.544016 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:57 crc kubenswrapper[4868]: E1003 12:50:57.544155 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.581043 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.581640 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.581793 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.581955 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.582139 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.685135 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.685176 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.685186 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.685201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.685210 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.788026 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.788087 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.788096 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.788110 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.788119 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.891267 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.891324 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.891338 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.891359 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.891373 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.994851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.994920 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.994959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.994999 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:57 crc kubenswrapper[4868]: I1003 12:50:57.995024 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:57Z","lastTransitionTime":"2025-10-03T12:50:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.097864 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.097923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.097936 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.097959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.097972 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.201548 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.201608 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.201632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.201658 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.201675 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.304400 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.304467 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.304477 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.304497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.304510 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.407007 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.407106 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.407119 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.407142 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.407157 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.510557 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.510663 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.510693 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.510736 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.510766 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.543528 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.543633 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:50:58 crc kubenswrapper[4868]: E1003 12:50:58.543718 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:50:58 crc kubenswrapper[4868]: E1003 12:50:58.543824 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.613464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.613517 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.613526 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.613544 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.613557 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.716344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.716380 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.716388 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.716410 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.716420 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.819128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.819172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.819184 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.819200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.819211 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.922014 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.922083 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.922099 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.922115 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:58 crc kubenswrapper[4868]: I1003 12:50:58.922124 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:58Z","lastTransitionTime":"2025-10-03T12:50:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.024225 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.024266 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.024277 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.024295 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.024306 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.127395 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.127443 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.127452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.127466 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.127475 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.230708 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.230742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.230752 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.230765 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.230774 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.333353 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.333429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.333443 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.333464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.333476 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.437116 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.437187 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.437201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.437226 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.437627 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.541460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.541510 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.541523 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.541544 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.541561 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.543020 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.543020 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:50:59 crc kubenswrapper[4868]: E1003 12:50:59.543184 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:50:59 crc kubenswrapper[4868]: E1003 12:50:59.543235 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.644907 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.644970 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.644992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.645020 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.645039 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.749125 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.749186 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.749201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.749225 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.749241 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.852452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.852501 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.852513 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.852532 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.852546 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.955900 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.955956 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.955969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.955989 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:50:59 crc kubenswrapper[4868]: I1003 12:50:59.956000 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:50:59Z","lastTransitionTime":"2025-10-03T12:50:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.059141 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.059208 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.059223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.059243 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.059257 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.162321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.162382 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.162393 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.162412 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.162423 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.265518 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.265592 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.265603 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.265627 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.265641 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.368785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.368876 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.368887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.368906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.368921 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.472196 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.472245 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.472257 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.472274 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.472286 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.543389 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.543466 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:00 crc kubenswrapper[4868]: E1003 12:51:00.543629 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:00 crc kubenswrapper[4868]: E1003 12:51:00.543728 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.574597 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.574648 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.574657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.574671 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.574680 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.676395 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.676435 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.676444 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.676460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.676469 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.778917 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.778949 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.778958 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.778971 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.778981 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.880738 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.880797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.880806 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.880821 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.880831 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.982962 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.983001 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.983009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.983024 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:00 crc kubenswrapper[4868]: I1003 12:51:00.983033 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:00Z","lastTransitionTime":"2025-10-03T12:51:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.084938 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.084981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.084989 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.085007 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.085018 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.187404 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.187449 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.187460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.187477 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.187489 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.289768 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.289823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.289837 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.289859 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.289872 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.337305 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.337352 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.337364 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.337382 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.337392 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.350506 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:01Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.354269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.354311 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.354321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.354339 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.354348 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.366707 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:01Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.370664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.370708 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.370720 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.370739 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.370751 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.386689 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:01Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.390079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.390135 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
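The patch failures above all trace to one cause: the serving certificate behind https://127.0.0.1:9743 (the node.network-node-identity.openshift.io webhook) expired on 2025-08-24T17:21:41Z, well before the current time the kubelet reports. Below is a minimal Go sketch of how one could confirm that validity window from the node; the address and the expiry come from the log lines above, while the probe itself is illustrative and not part of the captured output.

// certprobe.go - fetch the serving certificate from the webhook endpoint
// named in the log and print its validity window.
package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// InsecureSkipVerify lets us retrieve the certificate even though
	// verification fails (that failure is exactly what the kubelet logs).
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial: %v", err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	if now.After(cert.NotAfter) {
		// Mirrors the x509 error in the log: current time is after notAfter.
		fmt.Printf("EXPIRED: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
}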
event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.390155 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.390183 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.390196 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.403919 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:01Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.407870 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.407964 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
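Independently of the webhook failure, every "Node became not ready" condition above carries the same reason: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/, so NetworkReady stays false until the network plugin writes one. The following Go sketch performs that directory check; the path is taken from the log, while the .conf/.conflist/.json extension filter reflects common CNI conventions and is an assumption, not something shown in this output.

// cnicheck.go - report whether the CNI config directory the kubelet
// complains about actually contains any network configuration files.
package main

import (
	"fmt"
	"log"
	"os"
	"path/filepath"
	"strings"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d" // path from the kubelet message above
	entries, err := os.ReadDir(dir)
	if err != nil {
		log.Fatalf("read %s: %v", dir, err)
	}
	var found []string
	for _, e := range entries {
		// Assumed convention: CNI configs end in .conf, .conflist, or .json.
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			found = append(found, e.Name())
		}
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; network plugin has not written its config yet")
		return
	}
	fmt.Println("CNI configs:", found)
}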
event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.407979 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.407995 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.408004 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.424124 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:01Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.424292 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.426083 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
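The 12:51:01.424292 line marks the kubelet giving up for this sync period: the kubelet attempts the status update a fixed number of times (nodeStatusUpdateRetry, 5 in recent upstream sources) before logging "update node status exceeds retry count" and waiting for the next sync. The Go sketch below reconstructs that bounded-retry shape under that assumption; the names and the stubbed error are illustrative, not the kubelet's actual code.

// retrysketch.go - bounded-retry shape behind the "exceeds retry count" line.
package main

import (
	"errors"
	"fmt"
)

const nodeStatusUpdateRetry = 5 // assumed per-sync attempt budget, as upstream

// patchNodeStatus stands in for the API call the expired-cert webhook rejects.
func patchNodeStatus(attempt int) error {
	return errors.New("failed calling webhook: x509: certificate has expired or is not yet valid")
}

func updateNodeStatus() error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := patchNodeStatus(i); err != nil {
			// Each failed attempt produces one "will retry" log entry.
			fmt.Printf("Error updating node status, will retry (attempt %d/%d): %v\n",
				i+1, nodeStatusUpdateRetry, err)
			continue
		}
		return nil
	}
	// After the budget is spent, the kubelet reports the terminal error.
	return errors.New("update node status exceeds retry count")
}

func main() {
	if err := updateNodeStatus(); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}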
event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.426156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.426172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.426199 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.426215 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.529837 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.529911 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.529930 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.529953 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.529974 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.543317 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.543346 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.543547 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:01 crc kubenswrapper[4868]: E1003 12:51:01.543653 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.633933 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.633998 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.634012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.634036 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.634070 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.737519 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.737591 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.737602 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.737618 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.737629 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.842141 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.842204 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.842216 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.842237 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.842256 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.944584 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.944631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.944641 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.944659 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:01 crc kubenswrapper[4868]: I1003 12:51:01.944672 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:01Z","lastTransitionTime":"2025-10-03T12:51:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.047934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.047985 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.047998 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.048018 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.048031 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.151777 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.151835 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.151848 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.151908 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.151925 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.254949 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.255000 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.255013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.255027 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.255036 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.357775 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.357829 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.357838 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.357856 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.357865 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.461563 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.461631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.461643 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.461666 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.461679 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.544116 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:02 crc kubenswrapper[4868]: E1003 12:51:02.544289 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.544138 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:02 crc kubenswrapper[4868]: E1003 12:51:02.544405 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.565548 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.565629 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.565649 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.565681 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.565702 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.668635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.668702 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.668720 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.668750 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.668770 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.771781 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.771827 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.771836 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.771852 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.771861 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.875657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.875751 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.875763 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.875786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.875799 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.978741 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.978816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.978831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.978849 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:02 crc kubenswrapper[4868]: I1003 12:51:02.978860 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:02Z","lastTransitionTime":"2025-10-03T12:51:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.081712 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.081769 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.081789 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.081817 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.081840 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.183875 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.183917 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.183929 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.183947 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.183958 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.286435 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.286483 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.286493 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.286509 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.286520 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.389586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.389649 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.389659 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.389677 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.389689 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.492379 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.492444 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.492460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.492482 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.492519 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.543100 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.543127 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:03 crc kubenswrapper[4868]: E1003 12:51:03.543286 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:03 crc kubenswrapper[4868]: E1003 12:51:03.543452 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.594706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.594749 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.594757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.594772 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.594781 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.696844 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.696936 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.696951 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.696968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.696980 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.800923 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.800994 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.801005 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.801026 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.801040 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.903221 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.903285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.903305 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.903326 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:03 crc kubenswrapper[4868]: I1003 12:51:03.903339 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:03Z","lastTransitionTime":"2025-10-03T12:51:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.010813 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.010887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.010906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.010938 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.010958 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.114939 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.115003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.115016 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.115035 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.115048 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.218221 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.218292 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.218307 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.218344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.218363 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.321551 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.321627 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.321636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.321655 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.321667 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.424758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.424840 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.424864 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.424896 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.424916 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.528146 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.528192 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.528200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.528215 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.528225 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.543108 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.543139 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:04 crc kubenswrapper[4868]: E1003 12:51:04.543220 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:04 crc kubenswrapper[4868]: E1003 12:51:04.543438 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.560699 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.577369 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.595268 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.610718 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.627182 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.632887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.632947 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.632962 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.632983 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.633003 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.647080 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.668508 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.686541 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.703642 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.720534 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.736681 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.736772 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.736783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.736805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.736825 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.737892 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.752148 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.766565 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.778797 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.792855 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.804417 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.816170 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:04Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.838520 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.838573 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.838584 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.838600 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.838610 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.940910 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.940952 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.940961 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.940977 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:04 crc kubenswrapper[4868]: I1003 12:51:04.940987 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:04Z","lastTransitionTime":"2025-10-03T12:51:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.043335 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.043372 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.043383 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.043401 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.043414 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.145998 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.146044 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.146074 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.146091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.146101 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.248240 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.248290 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.248303 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.248321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.248333 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.350462 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.350519 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.350538 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.350570 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.350589 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.453756 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.453876 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.453916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.453956 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.453983 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.543291 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.543370 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:05 crc kubenswrapper[4868]: E1003 12:51:05.543498 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:05 crc kubenswrapper[4868]: E1003 12:51:05.543629 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.556684 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.556720 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.556729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.556743 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.556755 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659005 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659044 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659084 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659120 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.659120 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.761634 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.761673 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.761682 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.761696 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.761705 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.864638 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.864715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.864733 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.864768 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.864782 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.967338 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.967367 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.967376 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.967389 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:05 crc kubenswrapper[4868]: I1003 12:51:05.967397 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:05Z","lastTransitionTime":"2025-10-03T12:51:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.070619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.070657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.070665 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.070680 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.070689 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.172397 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.172432 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.172443 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.172457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.172466 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.275720 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.275775 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.275785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.275808 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.275823 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.379321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.379369 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.379381 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.379406 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.379417 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.482898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.483271 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.483373 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.483471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.483600 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.544035 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.544370 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:06 crc kubenswrapper[4868]: E1003 12:51:06.544933 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:06 crc kubenswrapper[4868]: E1003 12:51:06.545251 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.545857 4868 scope.go:117] "RemoveContainer" containerID="e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.587920 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.588472 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.588487 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.588512 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.588527 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.692155 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.692219 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.692230 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.692255 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.692277 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.796285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.796352 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.796363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.796387 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.796696 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.899906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.899965 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.899979 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.899999 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.900015 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:06Z","lastTransitionTime":"2025-10-03T12:51:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:06 crc kubenswrapper[4868]: I1003 12:51:06.954646 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/1.log" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.003858 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.003932 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.003950 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.003979 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.003997 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.107230 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.107273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.107285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.107309 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.107324 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.211193 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.211241 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.211252 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.211269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.211281 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.314960 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.315016 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.315031 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.315074 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.315090 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.417640 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.417704 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.417715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.417739 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.417755 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.521026 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.521101 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.521112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.521129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.521141 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.542968 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.543127 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:51:07 crc kubenswrapper[4868]: E1003 12:51:07.543177 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
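The NetworkReady=false message that keeps these pods from syncing comes down to the runtime finding no CNI configuration in /etc/kubernetes/cni/net.d/. A stdlib-only Go sketch of that kind of presence check follows; the real check lives in the container runtime's CNI plugin manager and differs in detail, so treat this as an approximation, with cniConfigPresent a hypothetical name.

```go
// Sketch only: approximate the "no CNI configuration file" readiness probe by
// scanning the directory named in the log for common CNI config file patterns.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether any plausible CNI config file exists in dir.
func cniConfigPresent(dir string) (bool, error) {
	for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
		matches, err := filepath.Glob(filepath.Join(dir, pattern))
		if err != nil {
			return false, err
		}
		if len(matches) > 0 {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	if !ok {
		// Mirrors the condition the kubelet keeps reporting above.
		fmt.Println("NetworkReady=false: no CNI configuration file found")
		return
	}
	fmt.Println("NetworkReady=true")
}
```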
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.623826 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.623864 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.623876 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.623892 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.623903 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726562 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726611 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726621 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726645 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.726645 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.829397 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.829442 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.829453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.829471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.829485 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.931614 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.931640 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.931647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.931662 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
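The recurring "failed calling webhook ... certificate has expired or is not yet valid" failures in the status_manager records (one follows below) are Go's standard x509 validity-window check rejecting the webhook's serving certificate, whose NotAfter (2025-08-24T17:21:41Z) is well before the node's clock. A stdlib sketch of that check follows; the PEM path is hypothetical, and the error text is only formatted to resemble the log's.

```go
// Sketch only: the [NotBefore, NotAfter] validity check that crypto/x509
// performs during TLS verification, which is what fails in the webhook calls
// recorded in this log. The certificate path below is a placeholder.
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

// checkValidity parses a PEM certificate and verifies now falls in its window.
func checkValidity(pemPath string, now time.Time) error {
	data, err := os.ReadFile(pemPath)
	if err != nil {
		return err
	}
	block, _ := pem.Decode(data)
	if block == nil {
		return fmt.Errorf("no PEM block in %s", pemPath)
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		return err
	}
	if now.Before(cert.NotBefore) || now.After(cert.NotAfter) {
		// Formatted to resemble the log's verification error for an expired cert.
		return fmt.Errorf("certificate has expired or is not yet valid: current time %s is after %s",
			now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
	}
	return nil
}

func main() {
	if err := checkValidity("/tmp/webhook-serving.crt", time.Now()); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println("certificate is within its validity window")
}
```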
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.931672 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:07Z","lastTransitionTime":"2025-10-03T12:51:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.965777 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/1.log"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.968833 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d"}
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.969604 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz"
Oct 03 12:51:07 crc kubenswrapper[4868]: I1003 12:51:07.991525 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:07Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.009336 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.024493 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.034469 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.034527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.034540 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.034560 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.034573 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.040691 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.055857 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.070452 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.082934 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.094688 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.107223 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.120117 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\
\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.133772 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.137200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.137231 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.137242 4868 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.137258 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.137270 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.147477 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-co
nf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.164074 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recur
siveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Comp
leted\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.180514 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.192630 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-m
anager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.212285 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.239315 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.239341 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.239348 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.239361 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.239370 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.241000 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.341654 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.341693 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.341701 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.341715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.341725 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.445133 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.445194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.445209 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.445235 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.445252 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.543169 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.543206 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:08 crc kubenswrapper[4868]: E1003 12:51:08.543370 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:08 crc kubenswrapper[4868]: E1003 12:51:08.543649 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.547397 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.547429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.547440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.547457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.547484 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.650478 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.650514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.650523 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.650536 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.650547 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.752845 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.752887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.752898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.752916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.752930 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.854726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.854770 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.854781 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.854796 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.854806 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.956835 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.956869 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.956878 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.956890 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.956900 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:08Z","lastTransitionTime":"2025-10-03T12:51:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.974762 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/2.log" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.975300 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/1.log" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.977709 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d" exitCode=1 Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.977749 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d"} Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.977781 4868 scope.go:117] "RemoveContainer" containerID="e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.978509 4868 scope.go:117] "RemoveContainer" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d" Oct 03 12:51:08 crc kubenswrapper[4868]: E1003 12:51:08.978906 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:51:08 crc kubenswrapper[4868]: I1003 12:51:08.989806 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.002165 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:08Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.012686 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.026993 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.036795 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.050119 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.060072 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.060118 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.060129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.060146 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.060158 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.066519 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 
12:51:09.082500 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.103282 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-releas
e\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{
\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.117779 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"
startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.132141 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.161361 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e76db4bf3fc2457058981b7c3df4ecaefaf90b196ce25113dbd5dc2d3a7630d5\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"message\\\":\\\"m k8s.io/client-go/informers/factory.go:160\\\\nI1003 12:50:51.031999 6414 reflector.go:311] Stopping reflector *v1alpha1.BaselineAdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032137 6414 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI1003 12:50:51.032743 6414 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:50:51.032765 6414 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:50:51.032782 6414 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:50:51.032803 6414 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:50:51.032822 6414 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:50:51.032830 6414 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:50:51.032864 6414 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:50:51.032870 6414 factory.go:656] Stopping watch factory\\\\nI1003 12:50:51.032906 6414 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:50:51.032913 6414 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12:50:51.032882 6414 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending 
*v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.164408 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.164453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.164466 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.164488 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.164501 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.178783 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.205500 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.229893 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.249337 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.265691 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.267758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.267798 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.267808 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.267826 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.267837 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.370440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.370486 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.370497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.370514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.370523 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.473586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.473636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.473649 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.473667 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.473677 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.543217 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.543224 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:09 crc kubenswrapper[4868]: E1003 12:51:09.543377 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:09 crc kubenswrapper[4868]: E1003 12:51:09.543571 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.556754 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.577479 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.577539 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.577554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.577570 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.577583 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.679593 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.679642 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.679655 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.679674 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.679687 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.781742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.781786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.781801 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.781819 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.781832 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.884504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.884541 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.884552 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.884568 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.884579 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.982299 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/2.log" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.984975 4868 scope.go:117] "RemoveContainer" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d" Oct 03 12:51:09 crc kubenswrapper[4868]: E1003 12:51:09.985103 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.985869 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.985891 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.985899 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.985909 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:09 crc kubenswrapper[4868]: I1003 12:51:09.985918 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:09Z","lastTransitionTime":"2025-10-03T12:51:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.000479 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:09Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.014231 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.027238 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.044321 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.060803 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.074968 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088394 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088849 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088889 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088897 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.088923 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.101747 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.114242 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.125836 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.140593 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"start
ed\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},
{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"
system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.154635 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.167405 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.185772 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.191285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.191328 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.191338 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.191359 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.191374 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.207653 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c2270
97f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.218581 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.230663 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.240922 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:10Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.293662 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.293694 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.293703 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.293718 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.293729 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.396791 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.397636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.397730 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.397820 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.397881 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.501159 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.501209 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.501219 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.501237 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.501247 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.543488 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.543567 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:10 crc kubenswrapper[4868]: E1003 12:51:10.543705 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:10 crc kubenswrapper[4868]: E1003 12:51:10.543807 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.604246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.604314 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.604337 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.604370 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.604393 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.707673 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.707759 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.707801 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.707829 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.707908 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.811419 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.811832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.811953 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.812088 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.812189 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.914336 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.914580 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.914650 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.914711 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:10 crc kubenswrapper[4868]: I1003 12:51:10.914768 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:10Z","lastTransitionTime":"2025-10-03T12:51:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.018601 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.018692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.018707 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.018730 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.018746 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.121397 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.121453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.121464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.121484 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.121496 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.224204 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.224275 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.224289 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.224316 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.224330 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.327233 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.327706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.327804 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.327906 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.327985 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.430690 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.430760 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.430774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.430799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.430820 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.533993 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.534097 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.534111 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.534133 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.534145 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.543351 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.543547 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.543695 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.543816 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.573471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.573524 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.573536 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.573553 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.573565 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.588938 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:11Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.593464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.593504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.593515 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.593533 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.593546 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.607991 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:11Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.612774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.612809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.612822 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.612839 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.612850 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.624608 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:11Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.627747 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.627798 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.627807 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.627823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.627833 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.641718 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:11Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.645554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.645597 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.645607 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.645623 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.645633 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.660169 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:11Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.660283 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.662742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.662785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.662797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.662816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.662827 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.689679 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.689939 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:51:11 crc kubenswrapper[4868]: E1003 12:51:11.690084 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:51:43.69002998 +0000 UTC m=+99.899879116 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.768367 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.768427 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.768450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.768481 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.768500 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.871298 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.871340 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.871349 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.871365 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.871375 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.973721 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.973779 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.973787 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.973802 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:11 crc kubenswrapper[4868]: I1003 12:51:11.973812 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:11Z","lastTransitionTime":"2025-10-03T12:51:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.076535 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.076596 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.076609 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.076631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.076643 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.179109 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.179151 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.179160 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.179179 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.179190 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.282021 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.282095 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.282110 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.282134 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.282148 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.384966 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.385013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.385023 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.385038 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.385074 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.487554 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.487602 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.487611 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.487628 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.487639 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.543798 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.543837 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:12 crc kubenswrapper[4868]: E1003 12:51:12.544015 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:12 crc kubenswrapper[4868]: E1003 12:51:12.544159 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.589908 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.589970 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.589980 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.589995 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.590006 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.693224 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.693261 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.693271 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.693289 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.693301 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.796128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.796218 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.796231 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.796251 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.796270 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.899315 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.899384 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.899425 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.899450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:12 crc kubenswrapper[4868]: I1003 12:51:12.899464 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:12Z","lastTransitionTime":"2025-10-03T12:51:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.001199 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.001246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.001261 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.001279 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.001295 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.103739 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.103793 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.103805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.103829 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.103843 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.206317 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.206612 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.206749 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.206969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.207087 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.310163 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.310201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.310211 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.310229 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.310241 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.413440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.413895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.414190 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.414619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.414807 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.517267 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.518183 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.518429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.518548 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.518635 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.543895 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.544510 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:13 crc kubenswrapper[4868]: E1003 12:51:13.544631 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:13 crc kubenswrapper[4868]: E1003 12:51:13.544899 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.622158 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.622241 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.622251 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.622266 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.622277 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.725218 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.725269 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.725281 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.725299 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.725312 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.828489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.828560 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.828574 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.828596 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.828614 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.931123 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.931169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.931178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.931192 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:13 crc kubenswrapper[4868]: I1003 12:51:13.931201 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:13Z","lastTransitionTime":"2025-10-03T12:51:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.033830 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.033871 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.033882 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.033898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.033907 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.136735 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.136785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.136798 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.136816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.136829 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.240085 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.240119 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.240128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.240143 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.240154 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.343851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.343925 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.343936 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.343957 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.343972 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.446774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.446817 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.446827 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.446846 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.446857 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.544013 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:14 crc kubenswrapper[4868]: E1003 12:51:14.544154 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.544228 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:14 crc kubenswrapper[4868]: E1003 12:51:14.544354 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.549065 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.549098 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.549114 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.549128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.549139 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.561778 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.571764 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.588877 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.604767 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.621208 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resour
ces\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.634091 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.651765 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.651812 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.651826 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.651846 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.651858 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.656714 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.670432 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\
\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.681998 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.696341 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.712636 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.727221 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.741173 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.754609 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.754658 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.754697 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.754717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.754729 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.756104 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.769340 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.785731 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.803947 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.817626 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:14Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.857948 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.857986 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.857995 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.858009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.858020 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.961222 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.961273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.961300 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.961318 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:14 crc kubenswrapper[4868]: I1003 12:51:14.961327 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:14Z","lastTransitionTime":"2025-10-03T12:51:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.069329 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.069372 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.069382 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.069400 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.069412 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.172418 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.172463 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.172473 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.172492 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.172503 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.275657 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.275918 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.276091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.276200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.276302 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.378862 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.378899 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.378909 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.378924 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.378933 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.481613 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.481660 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.481674 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.481689 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.481699 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.543884 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:15 crc kubenswrapper[4868]: E1003 12:51:15.544080 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.544173 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:15 crc kubenswrapper[4868]: E1003 12:51:15.544222 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.583847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.583888 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.583897 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.583912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.583921 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.686996 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.687087 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.687101 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.687120 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.687131 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.790153 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.790212 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.790224 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.790245 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.790259 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.892941 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.893012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.893026 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.893081 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.893100 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.996194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.996260 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.996292 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.996317 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:15 crc kubenswrapper[4868]: I1003 12:51:15.996331 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:15Z","lastTransitionTime":"2025-10-03T12:51:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.099221 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.099272 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.099283 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.099299 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.099312 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.206692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.206726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.206738 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.206757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.206769 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.310649 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.310724 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.310738 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.310763 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.310779 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.412742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.412779 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.412790 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.412807 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.412819 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.515484 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.515538 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.515547 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.515563 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.515574 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.543597 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.543620 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:16 crc kubenswrapper[4868]: E1003 12:51:16.543833 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:16 crc kubenswrapper[4868]: E1003 12:51:16.543963 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.617451 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.617489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.617497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.617511 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.617520 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.720801 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.720844 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.720859 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.720874 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.720884 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.823280 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.823318 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.823328 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.823344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.823354 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.925455 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.925521 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.925532 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.925556 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:16 crc kubenswrapper[4868]: I1003 12:51:16.925573 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:16Z","lastTransitionTime":"2025-10-03T12:51:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.028357 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.028446 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.028457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.028477 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.028486 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.130519 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.130567 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.130578 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.130597 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.130611 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.233870 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.233918 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.233933 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.233951 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.233961 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.337029 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.337117 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.337132 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.337156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.337173 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.440023 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.440091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.440104 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.440121 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.440132 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542390 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542443 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542454 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542475 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542485 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542958 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.542980 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:17 crc kubenswrapper[4868]: E1003 12:51:17.543142 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:17 crc kubenswrapper[4868]: E1003 12:51:17.543224 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.645718 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.645777 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.645793 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.645821 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.645838 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.749771 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.749837 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.749848 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.749871 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.749891 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.853388 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.853441 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.853452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.853474 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.853488 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.956749 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.956800 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.956812 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.956831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:17 crc kubenswrapper[4868]: I1003 12:51:17.956843 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:17Z","lastTransitionTime":"2025-10-03T12:51:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.059851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.059904 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.059924 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.059945 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.059957 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.162846 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.162947 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.162974 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.163012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.163037 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.265334 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.265390 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.265400 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.265417 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.265430 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.368268 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.368336 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.368346 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.368364 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.368377 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.470552 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.470607 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.470620 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.470637 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.470648 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.543293 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.543368 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:18 crc kubenswrapper[4868]: E1003 12:51:18.543437 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:18 crc kubenswrapper[4868]: E1003 12:51:18.543737 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.573429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.573478 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.573489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.573505 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.573517 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.676292 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.676328 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.676340 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.676356 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.676367 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.779118 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.779169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.779184 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.779205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.779220 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.882239 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.882313 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.882335 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.882368 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.882389 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.985450 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.985504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.985519 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.985544 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:18 crc kubenswrapper[4868]: I1003 12:51:18.985560 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:18Z","lastTransitionTime":"2025-10-03T12:51:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.089238 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.089306 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.089319 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.089341 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.089358 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.191809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.191847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.191856 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.191869 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.191878 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.294452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.294498 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.294512 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.294530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.294543 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.396965 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.396999 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.397008 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.397024 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.397036 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.499785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.499888 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.499902 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.499925 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.499942 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.543363 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.543423 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:19 crc kubenswrapper[4868]: E1003 12:51:19.543573 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:19 crc kubenswrapper[4868]: E1003 12:51:19.543737 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.602129 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.602176 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.602195 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.602213 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.602224 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.704668 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.704723 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.704737 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.704757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.704769 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.807734 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.807800 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.807814 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.807834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.807846 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.911654 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.911726 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.911740 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.911763 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:19 crc kubenswrapper[4868]: I1003 12:51:19.911777 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:19Z","lastTransitionTime":"2025-10-03T12:51:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.014928 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.014969 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.014977 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.014990 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.014999 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.118017 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.118091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.118107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.118130 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.118149 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.221369 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.221422 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.221434 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.221456 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.221471 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.324593 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.324664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.324676 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.324697 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.324710 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.428592 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.428647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.428664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.428700 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.428714 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.531417 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.531487 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.531497 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.531514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.531528 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.543906 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.543967 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:20 crc kubenswrapper[4868]: E1003 12:51:20.544153 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:20 crc kubenswrapper[4868]: E1003 12:51:20.544322 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.634079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.634122 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.634132 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.634147 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.634157 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.737339 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.737413 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.737423 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.737440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.737454 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.841468 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.841532 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.841550 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.841579 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.841598 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.944748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.944823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.944842 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.944871 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:20 crc kubenswrapper[4868]: I1003 12:51:20.944895 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:20Z","lastTransitionTime":"2025-10-03T12:51:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.047525 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.047605 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.047626 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.047660 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.047687 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.150903 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.150939 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.150950 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.150967 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.150977 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.254475 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.254526 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.254539 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.254557 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.254571 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.356373 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.356412 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.356421 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.356435 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.356447 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.458691 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.458774 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.458792 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.458823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.458840 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.543964 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.544034 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.544125 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.544214 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.561887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.561942 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.561954 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.561972 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.561986 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.665116 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.665178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.665201 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.665234 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.665255 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.683464 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.683501 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.683511 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.683550 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.683565 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.695170 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:21Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.698944 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.698981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.698993 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.699009 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.699021 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.709904 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:21Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.713578 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.713616 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.713631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.713645 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.713655 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.725534 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:21Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.728801 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.728834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
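Every "Error updating node status, will retry" burst in this window fails for the same reason: the serving certificate of the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2025-10-03. A minimal Go sketch (illustrative only, not part of the cluster tooling; the endpoint is taken from the log) that dials the webhook and prints the serving certificate's validity window:

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Skip chain verification so an already-expired certificate can still be inspected.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        now := time.Now().UTC()
        fmt.Printf("NotBefore: %s\nNotAfter:  %s\nNow:       %s\nExpired:   %v\n",
            cert.NotBefore.UTC().Format(time.RFC3339),
            cert.NotAfter.UTC().Format(time.RFC3339),
            now.Format(time.RFC3339),
            now.After(cert.NotAfter))
    }

Against the endpoint above this would print Expired: true for any current time after 2025-08-24T17:21:41Z, which is exactly the x509 failure the kubelet keeps logging.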
event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.728895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.728911 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.728920 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.740101 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:21Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.743582 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.743628 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
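Each retry burst like the ones above ends with "Unable to update node status" err="update node status exceeds retry count" (see the record following the next attempt below). A minimal sketch of that bounded-retry pattern, assuming the upstream kubelet default of five attempts per sync loop (nodeStatusUpdateRetry); all names here are illustrative:

    package main

    import (
        "errors"
        "fmt"
    )

    // nodeStatusUpdateRetry mirrors the upstream kubelet constant (5); assumed here.
    const nodeStatusUpdateRetry = 5

    // patchNodeStatus stands in for the PATCH that keeps failing at the expired webhook.
    func patchNodeStatus() error {
        return errors.New("tls: failed to verify certificate: x509: certificate has expired or is not yet valid")
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := patchNodeStatus(); err != nil {
                fmt.Println("Error updating node status, will retry:", err)
                continue
            }
            return nil
        }
        return errors.New("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            fmt.Println("Unable to update node status:", err)
        }
    }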
event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.743640 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.743652 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.743662 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.758146 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:21Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:21Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:21 crc kubenswrapper[4868]: E1003 12:51:21.758308 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.767793 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.767850 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.767862 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.767877 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.767889 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.870108 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.870146 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.870154 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.870169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.870181 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.972854 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.972888 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.972897 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.972909 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:21 crc kubenswrapper[4868]: I1003 12:51:21.972918 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:21Z","lastTransitionTime":"2025-10-03T12:51:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.074912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.074968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.074978 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.074992 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.075001 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.176913 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.176985 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.177004 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.177024 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.177035 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.283738 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.283822 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.283831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.283847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.283860 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.391345 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.391434 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.391453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.391475 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.391492 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.493724 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.493775 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.493784 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.493798 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.493807 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.543590 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:51:22 crc kubenswrapper[4868]: E1003 12:51:22.543753 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.543784 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:51:22 crc kubenswrapper[4868]: E1003 12:51:22.544271 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.544574 4868 scope.go:117] "RemoveContainer" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d"
Oct 03 12:51:22 crc kubenswrapper[4868]: E1003 12:51:22.544845 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.596119 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.596163 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.596172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.596187 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.596195 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.698802 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.698847 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.698859 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.698876 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.698888 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.801660 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.802403 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.802499 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.802591 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.802676 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.905046 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.905100 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.905108 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.905121 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:22 crc kubenswrapper[4868]: I1003 12:51:22.905130 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:22Z","lastTransitionTime":"2025-10-03T12:51:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.007540 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.007586 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.007598 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.007614 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.007627 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.109959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.110013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.110028 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.110045 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.110086 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.213238 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.213314 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.213337 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.213402 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.213426 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.316713 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.316758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.316772 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.316795 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.316809 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.419111 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.419150 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.419158 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.419173 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.419183 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.521034 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.521079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.521091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.521107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.521118 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.543517 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:51:23 crc kubenswrapper[4868]: E1003 12:51:23.543738 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.543517 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:51:23 crc kubenswrapper[4868]: E1003 12:51:23.544157 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.623616 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.623659 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.623671 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.623708 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.623719 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.725972 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.726015 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.726028 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.726045 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.726079 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.828742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.828783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.828794 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.828811 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.828823 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.931082 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.931114 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.931124 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.931137 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:23 crc kubenswrapper[4868]: I1003 12:51:23.931146 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:23Z","lastTransitionTime":"2025-10-03T12:51:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.033474 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.033509 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.033522 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.033542 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.033573 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.136014 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.136332 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.136436 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.136559 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.136646 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.239172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.239210 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.239220 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.239235 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.239245 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.341743 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.341785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.341796 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.341814 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.341826 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.444005 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.444035 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.444043 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.444079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.444089 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.543192 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:51:24 crc kubenswrapper[4868]: E1003 12:51:24.543344 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.543390 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:51:24 crc kubenswrapper[4868]: E1003 12:51:24.543602 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.546468 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.546518 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.546530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.546546 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.546561 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.563619 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.576470 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.595977 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.611327 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.628130 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.649545 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.649678 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.649891 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.649948 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.649966 4868 setters.go:603] "Node became not ready" node="crc"
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.652979 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.669010 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.686915 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.698950 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.712429 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.728522 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.741615 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.753650 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.753716 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.753734 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.753758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.753775 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.754130 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.772579 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.789631 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 
12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.805907 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.824186 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/o
s-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.846793 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:24Z is after 
2025-08-24T17:21:41Z" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.857472 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.857530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.857561 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.857587 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.857607 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.960049 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.960113 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.960120 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.960135 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:24 crc kubenswrapper[4868]: I1003 12:51:24.960145 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:24Z","lastTransitionTime":"2025-10-03T12:51:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.063375 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.063429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.063442 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.063465 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.063480 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.165619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.165670 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.165682 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.165699 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.165711 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.269273 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.269749 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.269977 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.270182 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.270347 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.373981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.374107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.374131 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.374163 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.374186 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.478502 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.478664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.478687 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.478717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.478736 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.543818 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.543854 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:25 crc kubenswrapper[4868]: E1003 12:51:25.543980 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:25 crc kubenswrapper[4868]: E1003 12:51:25.544090 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.582192 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.582354 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.582381 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.582409 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.582432 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.686557 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.687150 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.687184 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.687213 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.687232 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.791029 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.791128 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.791145 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.791180 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.791197 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.895254 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.895303 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.895313 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.895331 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.895342 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.998167 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.998202 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.998211 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.998227 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:25 crc kubenswrapper[4868]: I1003 12:51:25.998237 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:25Z","lastTransitionTime":"2025-10-03T12:51:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.101248 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.101285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.101294 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.101311 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.101321 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.204359 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.204410 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.204428 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.204455 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.204475 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.308334 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.308418 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.308432 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.308457 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.308473 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.411840 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.411910 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.411930 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.411959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.411980 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.516487 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.517032 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.517045 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.517099 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.517111 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.543448 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.543490 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:26 crc kubenswrapper[4868]: E1003 12:51:26.543693 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:26 crc kubenswrapper[4868]: E1003 12:51:26.543896 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.620858 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.620938 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.620958 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.620987 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.621009 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.724402 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.724458 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.724471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.724492 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.724505 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.827075 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.827123 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.827132 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.827150 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.827158 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.932437 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.932514 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.932528 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.932619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:26 crc kubenswrapper[4868]: I1003 12:51:26.932631 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:26Z","lastTransitionTime":"2025-10-03T12:51:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.036799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.036888 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.036913 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.036950 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.036974 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.140680 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.140738 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.140757 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.140786 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.140807 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.244570 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.244610 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.244620 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.244635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.244648 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.348368 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.348436 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.348454 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.348479 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.348503 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.451595 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.451664 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.451687 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.451719 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.451739 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.543274 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.543376 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:27 crc kubenswrapper[4868]: E1003 12:51:27.543612 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:27 crc kubenswrapper[4868]: E1003 12:51:27.543908 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.555492 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.555564 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.555581 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.555612 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.555637 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.658912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.658997 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.659013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.659033 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.659078 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.762494 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.762587 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.762611 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.762645 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.762674 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.866507 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.866566 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.866590 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.866620 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.866639 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.970277 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.970333 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.970371 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.970392 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:27 crc kubenswrapper[4868]: I1003 12:51:27.970404 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:27Z","lastTransitionTime":"2025-10-03T12:51:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.072934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.073004 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.073023 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.073084 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.073104 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.176649 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.176734 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.176753 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.176783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.176803 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.282200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.282328 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.282346 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.282373 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.282394 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.385852 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.385921 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.385931 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.385949 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.385961 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.476251 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.476435 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476495 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:32.476461061 +0000 UTC m=+148.686310127 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.476567 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.476639 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.476663 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476673 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476727 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476752 4868 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476841 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476850 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-10-03 12:52:32.47681974 +0000 UTC m=+148.686668836 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476862 4868 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476837 4868 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476879 4868 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476912 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:52:32.476898072 +0000 UTC m=+148.686747168 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.476938 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-10-03 12:52:32.476925143 +0000 UTC m=+148.686774239 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.477030 4868 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.477337 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-10-03 12:52:32.477252881 +0000 UTC m=+148.687101997 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.489834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.489912 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.489934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.489967 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.489995 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.544126 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.544190 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.544404 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:28 crc kubenswrapper[4868]: E1003 12:51:28.544532 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.593049 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.593157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.593177 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.593205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.593227 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.696294 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.696356 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.696368 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.696390 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.696405 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.802874 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.803016 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.803040 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.803093 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.803117 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.906400 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.906481 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.906500 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.906540 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:28 crc kubenswrapper[4868]: I1003 12:51:28.906570 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:28Z","lastTransitionTime":"2025-10-03T12:51:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010368 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010442 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010508 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010530 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010368 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010442 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010508 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.010530 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.114785 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.114841 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.114860 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.114889 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.114910 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.218177 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.218247 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.218313 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.218342 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.218400 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.322692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.322762 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.322782 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.322815 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.322837 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.426501 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.426600 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.426629 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.426667 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.426696 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.530371 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.530440 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.530460 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.530489 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.530510 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.543622 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.543652 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:51:29 crc kubenswrapper[4868]: E1003 12:51:29.543805 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 12:51:29 crc kubenswrapper[4868]: E1003 12:51:29.543933 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.634013 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.634097 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.634114 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.634140 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.634156 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.737997 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.738139 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.738165 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.738209 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.738239 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.841622 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.841700 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.841717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.841745 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.841771 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.944852 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.944922 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.944941 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.944968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:29 crc kubenswrapper[4868]: I1003 12:51:29.944988 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:29Z","lastTransitionTime":"2025-10-03T12:51:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.047858 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.047981 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.048000 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.048114 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.048140 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.151281 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.151377 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.151396 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.151422 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.151436 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.254953 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.255037 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.255099 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.255134 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.255158 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.359169 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.359215 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.359228 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.359245 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.359255 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.462951 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.463018 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.463037 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.463096 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.463118 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.543595 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:51:30 crc kubenswrapper[4868]: E1003 12:51:30.543901 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.544651 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.565571 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.565637 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.565676 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.565705 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.565728 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.668986 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.669046 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.669079 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.669098 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.669108 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.772095 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.772154 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.772168 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.772187 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.772208 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.875859 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.875936 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.875961 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.875995 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.876023 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.979631 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.979706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.979719 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.979761 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:30 crc kubenswrapper[4868]: I1003 12:51:30.979779 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:30Z","lastTransitionTime":"2025-10-03T12:51:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.082731 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.082797 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.082815 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.082841 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.082860 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.186387 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.186461 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.186526 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.186569 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.186586 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.289466 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.289578 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.289601 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.289635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.289662 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.392816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.392894 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.392918 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.392952 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.392983 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.495887 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.495959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.495974 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.495997 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.496015 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.543870 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.544101 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:31 crc kubenswrapper[4868]: E1003 12:51:31.544204 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:31 crc kubenswrapper[4868]: E1003 12:51:31.544297 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.598562 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.598618 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.598627 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.598641 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.598669 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.701648 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.701699 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.701711 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.701730 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.701742 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.804162 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.804221 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.804233 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.804252 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.804265 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.907732 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.907794 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.907809 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.907824 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:31 crc kubenswrapper[4868]: I1003 12:51:31.907835 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:31Z","lastTransitionTime":"2025-10-03T12:51:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.011030 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.011113 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.011127 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.011144 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.011160 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.114435 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.114527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.114579 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.114599 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.114613 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129012 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129063 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129076 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129091 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129102 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.129102 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.141901 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:32Z is after 2025-08-24T17:21:41Z"
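The status patch fails before it reaches the API server: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, while the current time is 2025-10-03. A hedged diagnostic sketch of the same class of x509 verification failure, using a plain TLS handshake; the endpoint is taken from the error above, and this is an illustrative probe, not what the kubelet (a Go client) itself runs:

    import socket, ssl

    # Endpoint taken from the webhook error logged above.
    HOST, PORT = "127.0.0.1", 9743

    ctx = ssl.create_default_context()
    try:
        with socket.create_connection((HOST, PORT), timeout=5) as sock:
            with ctx.wrap_socket(sock, server_hostname=HOST):
                print("handshake ok: certificate chain verified")
    except ssl.SSLCertVerificationError as err:
        # An expired serving certificate fails verification here; depending on
        # the local trust store the message may cite the untrusted issuer
        # instead of the expiry, but it is the same x509 failure class the
        # kubelet reports above.
        print("verification failed:", err.verify_message)
    except OSError as err:
        print("connection failed:", err)

Until that certificate is rotated, every node-status patch is rejected by the webhook and the kubelet keeps retrying, which is why the same error payload repeats below.
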
event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.146895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.146910 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.146920 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.165510 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:32Z is after 2025-08-24T17:21:41Z"
Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.171956 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.172036 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.172107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.172153 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.172177 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.189253 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:32Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.194667 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.194711 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.194721 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.194746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.194759 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.210692 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:32Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.215451 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.215483 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.215493 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.215508 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.215519 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.228097 4868 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"29f6cb4f-8773-40be-94fe-aeed876e20ec\\\",\\\"systemUUID\\\":\\\"85afab4e-313e-432a-babc-46214b5eea5c\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:32Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.228242 4868 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.229997 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.230096 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.230112 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.230139 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.230157 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.333086 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.333139 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.333149 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.333174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.333188 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.437267 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.437328 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.437342 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.437369 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.437385 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.539771 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.539805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.539813 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.539827 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.539866 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.543925 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.544022 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.544124 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:32 crc kubenswrapper[4868]: E1003 12:51:32.544280 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.642818 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.642884 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.642898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.642916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.642931 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.747263 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.747354 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.747375 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.747408 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.747437 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.850121 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.850174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.850188 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.850212 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.850237 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.954558 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.954632 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.954648 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.954731 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:32 crc kubenswrapper[4868]: I1003 12:51:32.954746 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:32Z","lastTransitionTime":"2025-10-03T12:51:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.057705 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.057788 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.057805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.057832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.057849 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.161656 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.161706 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.161717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.161737 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.161753 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.264606 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.264652 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.264660 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.264679 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.264689 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.368266 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.368334 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.368346 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.368370 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.368386 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.472276 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.472345 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.472364 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.472391 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.472411 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.543662 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.543761 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:33 crc kubenswrapper[4868]: E1003 12:51:33.543848 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:33 crc kubenswrapper[4868]: E1003 12:51:33.543923 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.577142 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.577205 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.577223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.577247 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.577260 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.681427 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.681471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.681481 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.681502 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.681512 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.785173 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.785228 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.785249 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.785278 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.785301 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.888855 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.888907 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.888922 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.888939 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.888953 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.991515 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.991574 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.991589 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.991615 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:33 crc kubenswrapper[4868]: I1003 12:51:33.991630 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:33Z","lastTransitionTime":"2025-10-03T12:51:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.065361 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jpqwj_61cc9d5b-e515-469c-a472-190ebf3609a3/kube-multus/0.log" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.065427 4868 generic.go:334] "Generic (PLEG): container finished" podID="61cc9d5b-e515-469c-a472-190ebf3609a3" containerID="ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233" exitCode=1 Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.065479 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerDied","Data":"ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.066016 4868 scope.go:117] "RemoveContainer" containerID="ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.086775 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-
pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.095122 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.095182 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.095203 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.095241 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.095262 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.103018 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.121419 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.134692 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.150805 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.164513 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.186349 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.197891 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.197925 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.197934 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.197949 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.197960 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.206011 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.223582 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.242355 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.256800 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\"
:[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.281499 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ov
nkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 
8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"
name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.298819 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 
12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.303031 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.303094 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.303108 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.303130 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.303145 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.315970 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08a
af09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.332711 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true
,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 
12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.347424 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.364600 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.397174 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.406921 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.407008 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.407021 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.407040 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.407072 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.510741 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.510810 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.510823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.510850 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.510868 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.543732 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.543754 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:34 crc kubenswrapper[4868]: E1003 12:51:34.544012 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:34 crc kubenswrapper[4868]: E1003 12:51:34.544219 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.560667 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/k
ubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.578998 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.612966 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.614862 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.614916 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.614928 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.614947 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.614959 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.629433 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.645129 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.668121 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z
\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.687145 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.705457 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.718452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.718982 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.719003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.719022 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.719033 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.722028 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.739555 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.758184 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.781650 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.799745 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.815806 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.821613 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.821672 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.821685 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.821703 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.821714 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.832559 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\
\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.848753 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.865068 4868 
status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:34Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.883862 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:34Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.924733 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.924800 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:34 crc 
kubenswrapper[4868]: I1003 12:51:34.924814 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.924844 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:34 crc kubenswrapper[4868]: I1003 12:51:34.924867 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:34Z","lastTransitionTime":"2025-10-03T12:51:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.028048 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.028122 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.028157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.028177 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.028189 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.131740 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.131799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.131813 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.131836 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.131852 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.234903 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.234954 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.234994 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.235015 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.235029 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.338702 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.338754 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.338764 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.338869 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.338895 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.443531 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.443592 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.443602 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.443623 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.443636 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.544496 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.544564 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:51:35 crc kubenswrapper[4868]: E1003 12:51:35.545280 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.545433 4868 scope.go:117] "RemoveContainer" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d"
Oct 03 12:51:35 crc kubenswrapper[4868]: E1003 12:51:35.545528 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.547944 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.548005 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.548029 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.548092 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.548118 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.652439 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.652494 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.652516 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.652550 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.652575 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.762799 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.762875 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.762892 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.762917 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.762932 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.867462 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.867532 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.867553 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.867575 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.867587 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.970527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.970646 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.970663 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.970690 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:35 crc kubenswrapper[4868]: I1003 12:51:35.970707 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:35Z","lastTransitionTime":"2025-10-03T12:51:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.073103 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.073150 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.073161 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.073179 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.073192 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.075642 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jpqwj_61cc9d5b-e515-469c-a472-190ebf3609a3/kube-multus/0.log" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.075719 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerStarted","Data":"71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.091417 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"
imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.103259 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.123119 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.141026 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.159290 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.176313 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.176356 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.176367 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.176386 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.176397 4868 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.181704 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.195545 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.212340 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.226108 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.240251 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.253737 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.266504 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":
\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.277323 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.279530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.279599 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.279613 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.279635 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.279648 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.289891 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.344646 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.361505 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.375799 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.382240 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.382471 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc 
kubenswrapper[4868]: I1003 12:51:36.382486 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.382507 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.382523 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.387682 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:36Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.485212 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.485258 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.485267 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.485281 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.485293 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.543078 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.543125 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:36 crc kubenswrapper[4868]: E1003 12:51:36.543254 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:36 crc kubenswrapper[4868]: E1003 12:51:36.543339 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.587830 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.587879 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.587894 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.587915 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.587930 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.690481 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.690527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.690536 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.690551 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.690562 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.793895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.793946 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.793955 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.793978 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.793989 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.897033 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.897107 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.897123 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.897140 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.897150 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.999621 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.999668 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.999680 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.999696 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:36 crc kubenswrapper[4868]: I1003 12:51:36.999711 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:36Z","lastTransitionTime":"2025-10-03T12:51:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.080507 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/2.log" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.083857 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.084416 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.102157 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.102237 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.102262 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.102296 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.102319 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.103937 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.123294 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.146270 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.165662 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.182149 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.206612 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.206677 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.206694 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.206723 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.206742 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.214613 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 
12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.230937 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 
12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.252120 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.264586 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.278353 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.287006 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.296332 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.310156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.310209 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.310223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.310246 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.310430 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.312484 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.324392 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.336079 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.344794 4868 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.355933 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.368039 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:37Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.413795 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.413842 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.413851 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.413866 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.413875 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.517725 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.517806 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.517832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.517861 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.517882 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.543922 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.543957 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:37 crc kubenswrapper[4868]: E1003 12:51:37.544095 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:37 crc kubenswrapper[4868]: E1003 12:51:37.544323 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.619877 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.619961 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.619971 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.619983 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.619992 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.722559 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.722612 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.722623 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.722637 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.722647 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.825404 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.825479 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.825495 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.825524 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.825546 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.928321 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.928385 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.928396 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.928421 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:37 crc kubenswrapper[4868]: I1003 12:51:37.928435 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:37Z","lastTransitionTime":"2025-10-03T12:51:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.031305 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.031429 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.031438 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.031453 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.031464 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.090144 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/3.log" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.090818 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/2.log" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.093793 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" exitCode=1 Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.093830 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.093898 4868 scope.go:117] "RemoveContainer" containerID="6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.095417 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 12:51:38 crc kubenswrapper[4868]: E1003 12:51:38.095856 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.112830 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.126838 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.133725 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.133783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.133806 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.133834 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.133851 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.143102 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.161694 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.173990 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located 
when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.185603 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.197684 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"star
tedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.209274 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.225532 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.235685 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.235736 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.235748 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.235766 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.235825 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.236628 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.249182 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.259931 
4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.270978 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.284552 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.296341 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 
12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.309231 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\
",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.318650 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.335075 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011
fb9d8d715636547346458da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://6d76b00ba97a0a936c2e85259d2377be2b7c227097f7df27b014aeb0deaa296d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:08Z\\\",\\\"message\\\":\\\"emoval\\\\nI1003 12:51:08.441602 6608 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1003 12:51:08.441618 6608 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1003 12:51:08.441653 6608 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1003 12:51:08.442102 6608 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1003 12:51:08.442131 6608 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1003 12:51:08.442138 6608 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1003 12:51:08.442122 6608 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1003 12:51:08.442164 6608 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1003 12:51:08.442183 6608 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1003 12:51:08.442195 6608 handler.go:208] Removed *v1.Node event handler 2\\\\nI1003 12:51:08.442201 6608 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1003 12:51:08.442211 6608 handler.go:208] Removed *v1.Node event handler 7\\\\nI1003 12:51:08.442205 6608 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1003 12:51:08.442217 6608 factory.go:656] Stopping watch factory\\\\nI1003 12:51:08.442225 6608 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1003 12:51:08.442238 6608 ovnkube.go:599] Stopped ovnkube\\\\nI1003 12\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:37Z\\\",\\\"message\\\":\\\"try object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1003 12:51:36.962145 6986 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1003 12:51:36.962151 6986 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1003 12:51:36.962144 6986 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1003 12:51:36.962159 6986 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1003 12:51:36.962164 6986 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1003 12:51:36.962005 6986 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI1003 12:51:36.962180 6986 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nF1003 12:51:36.962243 6986 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start 
default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initia\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\
\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:38Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.341636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.341931 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.341942 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.341957 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.341968 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.444324 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.444370 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.444378 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.444393 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.444404 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.544002 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.544081 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:38 crc kubenswrapper[4868]: E1003 12:51:38.544154 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:38 crc kubenswrapper[4868]: E1003 12:51:38.544299 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.546620 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.546692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.546703 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.546717 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.546728 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.650863 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.650902 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.650911 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.650924 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.650932 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.752767 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.752805 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.752816 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.752832 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.752843 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.855597 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.855647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.855658 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.855673 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.855682 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.958297 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.958351 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.958362 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.958384 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:38 crc kubenswrapper[4868]: I1003 12:51:38.958399 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:38Z","lastTransitionTime":"2025-10-03T12:51:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.060765 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.060877 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.060892 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.060913 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.060924 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.098229 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/3.log" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.101040 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 12:51:39 crc kubenswrapper[4868]: E1003 12:51:39.101286 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.113101 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://09ddf4a762d5846cc72399e0ebaec624575d126f7a0b8d9b041a67a772b87cfb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://59db44886516bd96be8afc35bcb241ca075b99532dc4309c8c25ac6893d4789d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.127832 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cd420afb0d9ded18ceb4bdd4f932cbf0f31e3d28bb2dd2257ba9a57c2d6e9556\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.142367 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jpqwj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"61cc9d5b-e515-469c-a472-190ebf3609a3\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:33Z\\\",\\\"message\\\":\\\"2025-10-03T12:50:40+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5\\\\n2025-10-03T12:50:40+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_c6332cb3-c52a-4185-8790-d5cfa06e58c5 to /host/opt/cni/bin/\\\\n2025-10-03T12:50:41Z [verbose] multus-daemon started\\\\n2025-10-03T12:50:41Z [verbose] Readiness Indicator file check\\\\n2025-10-03T12:51:26Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:51:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-xwkrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jpqwj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.160298 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e2c273fa-527b-44a3-acd1-37e17e1f7d1c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0637a5f68f4758f1c94d9829b7823cf5fba96427135aca188625d0400abb9493\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4b4951242a540edad4e85e5ff5717a4f4e12fb2411f5f86cfb410a70a062cf1b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:34Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d07424c27be6f4a14b6d539596acb4cdf99d01021ef20a7b953c2decb22738b2\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0784534ecb5db42e930a03b0f154a47d3cb084e706a19f31454a05a658352899\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad5a642002f866ea5115e7927c789cbea0ec31904c2864dcc004ed4836c7672d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b4b22a3d99334ebb21c1ef914b75a46b2ca2081df1d7399efad5297a01568959\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3537272b14df580f472b5c88135f23d82270a120f706536d21dfb41d665b18c0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-z65d4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-cjm4x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.172573 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.172610 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc 
kubenswrapper[4868]: I1003 12:51:39.172619 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.172636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.172648 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.178418 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9e7c8870-28ba-4290-b38f-a8a130c8cb9e\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f71d5400c2d2c275a07f3c58042ca675ec9ae576089fff238feb1c2e3fffc52\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://076cd2dc0d1c8f2ce75b186408d78406c41a21706a889f4dd35f642f72571738\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://ae03cee658d037597b3381e619104887b52ad0d460881a8bab3a05252ed4d6e0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e4e8f7d3f4a874d35b3126e0d0bd553668843562bae77a5a3d531b10faea785d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.190017 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cvsg7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6930e7fb-9970-470b-b08c-8560249f8597\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aac0b762eb171eda06dde052d528c7ddf221256ecd89e9a12cedb1cdc702d5b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-vjg6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:25Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cvsg7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.211029 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:33Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-10-03T12:51:37Z\\\",\\\"message\\\":\\\"try object setup: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1003 12:51:36.962145 6986 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1003 12:51:36.962151 6986 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-target-xd92c\\\\nI1003 12:51:36.962144 6986 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g\\\\nI1003 12:51:36.962159 6986 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-target-xd92c in node crc\\\\nI1003 12:51:36.962164 6986 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1003 12:51:36.962005 6986 obj_retry.go:386] Retry successful for *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc after 0 failed attempt(s)\\\\nI1003 12:51:36.962180 6986 default_network_controller.go:776] Recording success event on pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nF1003 12:51:36.962243 6986 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initia\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:51:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2skbw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-fgxcz\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.227340 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7ce330d9-09c6-4bf3-b485-ea0a34b8f32d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5de68d9a169cacaf6423c86f0ea90c7d1bf51ac14a8dc26393480cf8e687222a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d0890ee2b6143247ea8501dd4f25bd8e184c3b60366a157d053c87415ed20be2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-ld996\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:38Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wcttt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.240691 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"bd0c3e6d-2e13-44fe-a5f3-4b01478a19c2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://86f8fd4e7a9878211672be4e0dcd4545071453d7083aeeaa44981b3dba80348c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ee59a4213178befa2422b1b766097651289ff0a1dea31598ab89109e87f3d0c6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.254657 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"50850efa-265a-47b3-b06c-fed65506c07f\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c81ffbd7b5911728e717a38e69ea59ed35d93a397f83ca29c252819fab736b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://bfcbb0a1200ceb0929ae7b96a8aeea7ef42c8b9e249c460650c99aa1e8201a5d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cbcd542a1a7eb1213c1f2e3f20fe7a420931c51472e00c155dc9230ccff02438\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8e7aa96accae62a5fed8463a80f89e042b2861d4d2043db96393dcdeaa1bfaf2\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8ddef03fede9b4a7fb5fb93dd64dcd3a3204e26c21a3152c95c680947c5b51de\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"le observer\\\\nW1003 12:50:23.646272 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI1003 12:50:23.646433 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1003 12:50:23.647208 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2182491551/tls.crt::/tmp/serving-cert-2182491551/tls.key\\\\\\\"\\\\nI1003 12:50:23.821207 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1003 12:50:23.824725 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1003 12:50:23.824747 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1003 12:50:23.824772 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1003 12:50:23.824778 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1003 12:50:23.834789 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI1003 12:50:23.834806 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW1003 12:50:23.834829 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834836 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1003 12:50:23.834843 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1003 12:50:23.834847 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1003 12:50:23.834853 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1003 12:50:23.834857 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF1003 12:50:23.838798 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:08Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b1cb9efb116b6a9ee7569c728944df7df4ca08b0548bbf1c557dd48210087d25\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ead517d9e1dc36ab446204e4c190ec6caec70f9cb7a1b36f5821254df87cc5cb\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.267765 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:30Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ef46a89772a3cd19ce1707e30c7350461e35a18c8992aeb6d4d2f2fdaec08665\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.275449 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.275513 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.275527 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.275545 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.275557 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.280025 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.290726 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"4fc2d690-5dcc-4f98-8607-0b3909f44c23\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:39Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4rctt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:39Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-nwqvb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.302361 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"497ac41c-0c82-4342-bac1-34983a65a052\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://bfcfee7844b55fb5835eaf08d543ec3472a29a9085d1379d6ac45975171081a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8def8198f1539536938c6f46e9c91b8a5b0e1e0fa64b49783d4a2d3fabbdbe2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://37395f57fb82d870ecd64e33753a911423634acb095c962c9a05a8aea3f832c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d1c5cf627e7b642f8130445463367ef3937bee585ebe4e0934ad33fa4bca58e5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-10-03T12:50:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-10-03T12:50:05Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:04Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.314867 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.329723 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:24Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.342968 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"71ca0541-cbbf-4390-b90e-f068349a51f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2ea0ee5cdbd89e2a0d8dda147a8600595de91d3234ca42fd9377c3c0d696a277\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:34Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:30Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wxgpt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:26Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-kbwqg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.355854 4868 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-597z4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8948782-cb43-4adf-a2a3-c5c22a3f1254\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-10-03T12:50:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8f4d7baf6e643284edc10d0b785f681e3dc2fa7e40e56249bf512ad919b37c82\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-10-03T12:50:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mqv45\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-10-03T12:50:28Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-597z4\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-10-03T12:51:39Z is after 2025-08-24T17:21:41Z" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.378082 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.378143 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.378156 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.378174 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.378185 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.482263 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.482300 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.482309 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.482325 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.482337 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.543450 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:39 crc kubenswrapper[4868]: E1003 12:51:39.543589 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.543752 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb"
Oct 03 12:51:39 crc kubenswrapper[4868]: E1003 12:51:39.544169 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.584997 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.585045 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.585069 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.585084 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.585094 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.688191 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.688283 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.688308 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.688344 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.688368 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.792285 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.792364 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.792385 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.792409 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.792427 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.896109 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.896194 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.896211 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.896250 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:39 crc kubenswrapper[4868]: I1003 12:51:39.896271 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:39Z","lastTransitionTime":"2025-10-03T12:51:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.004468 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.004625 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.004647 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.004679 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.004694 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.108522 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.108604 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.108617 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.108636 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.108672 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.211971 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.212028 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.212044 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.212084 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.212104 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.314668 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.314729 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.314742 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.314762 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.315029 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.417441 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.417485 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.417515 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.417528 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.417537 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.520813 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.520890 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.520904 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.520929 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.520943 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.544266 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.544580 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:40 crc kubenswrapper[4868]: E1003 12:51:40.544926 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:40 crc kubenswrapper[4868]: E1003 12:51:40.545078 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.624133 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.624185 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.624200 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.624222 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.624236 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.727618 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.727692 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.727715 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.727746 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.727769 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.830178 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.830262 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.830276 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.830294 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.830305 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.933758 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.933833 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.933857 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.933895 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:40 crc kubenswrapper[4868]: I1003 12:51:40.933921 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:40Z","lastTransitionTime":"2025-10-03T12:51:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.038223 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.038294 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.038314 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.038346 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.038372 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.141403 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.141478 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.141504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.141537 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.141565 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.244512 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.244831 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.244924 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.245019 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.245162 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.348238 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.348783 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.348864 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.348959 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.349038 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.451939 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.453238 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.453504 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.453585 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.453660 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.543312 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.543496 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:41 crc kubenswrapper[4868]: E1003 12:51:41.543653 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:41 crc kubenswrapper[4868]: E1003 12:51:41.543868 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.556951 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.556993 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.557003 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.557020 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.557032 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.660613 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.660669 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.660687 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.660713 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.660728 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.763136 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.763172 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.763181 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.763196 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.763206 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.865898 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.865945 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.865954 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.865968 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.865982 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.968881 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.969337 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.969416 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.969447 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:41 crc kubenswrapper[4868]: I1003 12:51:41.969471 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:41Z","lastTransitionTime":"2025-10-03T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.072363 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.072417 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.072434 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.072452 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.072464 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:42Z","lastTransitionTime":"2025-10-03T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.174639 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.174685 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.174699 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.174714 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.174724 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:42Z","lastTransitionTime":"2025-10-03T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.276983 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.277041 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.277083 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.277104 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.277120 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:42Z","lastTransitionTime":"2025-10-03T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.303447 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.303503 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.303513 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.303530 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.303542 4868 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-10-03T12:51:42Z","lastTransitionTime":"2025-10-03T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.349622 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l"] Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.350080 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.351905 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.352140 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.352747 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.355694 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.385794 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=51.385762491 podStartE2EDuration="51.385762491s" podCreationTimestamp="2025-10-03 12:50:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.372155543 +0000 UTC m=+98.582004619" watchObservedRunningTime="2025-10-03 12:51:42.385762491 +0000 UTC m=+98.595611567" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.415771 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podStartSLOduration=77.415749229 podStartE2EDuration="1m17.415749229s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.414106196 +0000 UTC m=+98.623955282" watchObservedRunningTime="2025-10-03 12:51:42.415749229 +0000 UTC m=+98.625598295" Oct 
03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.440287 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.440363 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aefc145-7400-418c-9b19-76bab65c02a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.440410 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.440519 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aefc145-7400-418c-9b19-76bab65c02a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.440687 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0aefc145-7400-418c-9b19-76bab65c02a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.443713 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-597z4" podStartSLOduration=78.443676193 podStartE2EDuration="1m18.443676193s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.429280325 +0000 UTC m=+98.639129411" watchObservedRunningTime="2025-10-03 12:51:42.443676193 +0000 UTC m=+98.653525259" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.495417 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-cjm4x" podStartSLOduration=77.495385583 podStartE2EDuration="1m17.495385583s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.494768537 +0000 UTC m=+98.704617613" watchObservedRunningTime="2025-10-03 12:51:42.495385583 +0000 UTC m=+98.705234669" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.495740 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-multus/multus-jpqwj" podStartSLOduration=77.495733932 podStartE2EDuration="1m17.495733932s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.475534071 +0000 UTC m=+98.685383137" watchObservedRunningTime="2025-10-03 12:51:42.495733932 +0000 UTC m=+98.705582998" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.515850 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=79.515832081 podStartE2EDuration="1m19.515832081s" podCreationTimestamp="2025-10-03 12:50:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.51579309 +0000 UTC m=+98.725642166" watchObservedRunningTime="2025-10-03 12:51:42.515832081 +0000 UTC m=+98.725681157" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.529954 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-cvsg7" podStartSLOduration=78.529937621 podStartE2EDuration="1m18.529937621s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.529242612 +0000 UTC m=+98.739091688" watchObservedRunningTime="2025-10-03 12:51:42.529937621 +0000 UTC m=+98.739786687" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.541921 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.541962 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aefc145-7400-418c-9b19-76bab65c02a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.541995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.542013 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aefc145-7400-418c-9b19-76bab65c02a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.542039 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: 
\"kubernetes.io/configmap/0aefc145-7400-418c-9b19-76bab65c02a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.542085 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.542117 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/0aefc145-7400-418c-9b19-76bab65c02a7-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.543042 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0aefc145-7400-418c-9b19-76bab65c02a7-service-ca\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.543195 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.543278 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:42 crc kubenswrapper[4868]: E1003 12:51:42.543395 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:42 crc kubenswrapper[4868]: E1003 12:51:42.543492 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.550622 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0aefc145-7400-418c-9b19-76bab65c02a7-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.565419 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0aefc145-7400-418c-9b19-76bab65c02a7-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-dcr7l\" (UID: \"0aefc145-7400-418c-9b19-76bab65c02a7\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.581719 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wcttt" podStartSLOduration=76.581665841 podStartE2EDuration="1m16.581665841s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.581506937 +0000 UTC m=+98.791356003" watchObservedRunningTime="2025-10-03 12:51:42.581665841 +0000 UTC m=+98.791514907" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.629952 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=33.629918619 podStartE2EDuration="33.629918619s" podCreationTimestamp="2025-10-03 12:51:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.612504992 +0000 UTC m=+98.822354078" watchObservedRunningTime="2025-10-03 12:51:42.629918619 +0000 UTC m=+98.839767705" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.630395 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=78.630390852 podStartE2EDuration="1m18.630390852s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:42.629358515 +0000 UTC m=+98.839207601" watchObservedRunningTime="2025-10-03 12:51:42.630390852 +0000 UTC m=+98.840239918" Oct 03 12:51:42 crc kubenswrapper[4868]: I1003 12:51:42.665535 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" Oct 03 12:51:43 crc kubenswrapper[4868]: I1003 12:51:43.116748 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" event={"ID":"0aefc145-7400-418c-9b19-76bab65c02a7","Type":"ContainerStarted","Data":"16326d7565f840b75e0fe157951171a34fa543bba2d007a86e5b3301351f40c0"} Oct 03 12:51:43 crc kubenswrapper[4868]: I1003 12:51:43.117014 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" event={"ID":"0aefc145-7400-418c-9b19-76bab65c02a7","Type":"ContainerStarted","Data":"65f0722ebbf7c4614f1af82e876e02ec75587bfc918a37da7cb9d14aeded9e38"} Oct 03 12:51:43 crc kubenswrapper[4868]: I1003 12:51:43.543672 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:43 crc kubenswrapper[4868]: I1003 12:51:43.543818 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:43 crc kubenswrapper[4868]: E1003 12:51:43.543846 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:43 crc kubenswrapper[4868]: E1003 12:51:43.544209 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:43 crc kubenswrapper[4868]: I1003 12:51:43.754851 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:43 crc kubenswrapper[4868]: E1003 12:51:43.754986 4868 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:51:43 crc kubenswrapper[4868]: E1003 12:51:43.755028 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs podName:4fc2d690-5dcc-4f98-8607-0b3909f44c23 nodeName:}" failed. No retries permitted until 2025-10-03 12:52:47.755014365 +0000 UTC m=+163.964863421 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs") pod "network-metrics-daemon-nwqvb" (UID: "4fc2d690-5dcc-4f98-8607-0b3909f44c23") : object "openshift-multus"/"metrics-daemon-secret" not registered Oct 03 12:51:44 crc kubenswrapper[4868]: I1003 12:51:44.543383 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:44 crc kubenswrapper[4868]: I1003 12:51:44.543410 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:44 crc kubenswrapper[4868]: E1003 12:51:44.544547 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:44 crc kubenswrapper[4868]: E1003 12:51:44.544619 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:45 crc kubenswrapper[4868]: I1003 12:51:45.543730 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:45 crc kubenswrapper[4868]: I1003 12:51:45.543779 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:45 crc kubenswrapper[4868]: E1003 12:51:45.543887 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:45 crc kubenswrapper[4868]: E1003 12:51:45.544032 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:46 crc kubenswrapper[4868]: I1003 12:51:46.543450 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:46 crc kubenswrapper[4868]: E1003 12:51:46.543587 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:46 crc kubenswrapper[4868]: I1003 12:51:46.543857 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:46 crc kubenswrapper[4868]: E1003 12:51:46.543923 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:47 crc kubenswrapper[4868]: I1003 12:51:47.543675 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:47 crc kubenswrapper[4868]: I1003 12:51:47.543721 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:47 crc kubenswrapper[4868]: E1003 12:51:47.544198 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:47 crc kubenswrapper[4868]: E1003 12:51:47.544371 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:48 crc kubenswrapper[4868]: I1003 12:51:48.543520 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:48 crc kubenswrapper[4868]: I1003 12:51:48.543523 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:48 crc kubenswrapper[4868]: E1003 12:51:48.543666 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:48 crc kubenswrapper[4868]: E1003 12:51:48.543738 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:49 crc kubenswrapper[4868]: I1003 12:51:49.543847 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:49 crc kubenswrapper[4868]: I1003 12:51:49.543914 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:49 crc kubenswrapper[4868]: E1003 12:51:49.543982 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:49 crc kubenswrapper[4868]: E1003 12:51:49.544134 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:50 crc kubenswrapper[4868]: I1003 12:51:50.543548 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:50 crc kubenswrapper[4868]: I1003 12:51:50.543548 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:50 crc kubenswrapper[4868]: E1003 12:51:50.543787 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:50 crc kubenswrapper[4868]: E1003 12:51:50.543914 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:51 crc kubenswrapper[4868]: I1003 12:51:51.543762 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:51 crc kubenswrapper[4868]: I1003 12:51:51.543778 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:51 crc kubenswrapper[4868]: E1003 12:51:51.543989 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:51 crc kubenswrapper[4868]: E1003 12:51:51.544766 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:51 crc kubenswrapper[4868]: I1003 12:51:51.593301 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-dcr7l" podStartSLOduration=86.593272296 podStartE2EDuration="1m26.593272296s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:43.136989568 +0000 UTC m=+99.346838684" watchObservedRunningTime="2025-10-03 12:51:51.593272296 +0000 UTC m=+107.803121372" Oct 03 12:51:51 crc kubenswrapper[4868]: I1003 12:51:51.594565 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Oct 03 12:51:52 crc kubenswrapper[4868]: I1003 12:51:52.543107 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:52 crc kubenswrapper[4868]: I1003 12:51:52.543107 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:52 crc kubenswrapper[4868]: E1003 12:51:52.543299 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:52 crc kubenswrapper[4868]: E1003 12:51:52.543454 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:53 crc kubenswrapper[4868]: I1003 12:51:53.543684 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:53 crc kubenswrapper[4868]: E1003 12:51:53.543844 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:53 crc kubenswrapper[4868]: I1003 12:51:53.543949 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:53 crc kubenswrapper[4868]: E1003 12:51:53.544305 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:53 crc kubenswrapper[4868]: I1003 12:51:53.544669 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 12:51:53 crc kubenswrapper[4868]: E1003 12:51:53.544992 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:51:54 crc kubenswrapper[4868]: I1003 12:51:54.544167 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:54 crc kubenswrapper[4868]: I1003 12:51:54.544199 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:54 crc kubenswrapper[4868]: E1003 12:51:54.545329 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:54 crc kubenswrapper[4868]: E1003 12:51:54.545445 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:55 crc kubenswrapper[4868]: I1003 12:51:55.543565 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:55 crc kubenswrapper[4868]: E1003 12:51:55.543699 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:55 crc kubenswrapper[4868]: I1003 12:51:55.543569 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:55 crc kubenswrapper[4868]: E1003 12:51:55.544149 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:56 crc kubenswrapper[4868]: I1003 12:51:56.543494 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:56 crc kubenswrapper[4868]: E1003 12:51:56.543629 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:56 crc kubenswrapper[4868]: I1003 12:51:56.543501 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:56 crc kubenswrapper[4868]: E1003 12:51:56.543730 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:57 crc kubenswrapper[4868]: I1003 12:51:57.543863 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:57 crc kubenswrapper[4868]: E1003 12:51:57.543990 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:57 crc kubenswrapper[4868]: I1003 12:51:57.543866 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:57 crc kubenswrapper[4868]: E1003 12:51:57.544192 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:51:58 crc kubenswrapper[4868]: I1003 12:51:58.543290 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:51:58 crc kubenswrapper[4868]: I1003 12:51:58.543582 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:51:58 crc kubenswrapper[4868]: E1003 12:51:58.543707 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:51:58 crc kubenswrapper[4868]: E1003 12:51:58.543930 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:51:59 crc kubenswrapper[4868]: I1003 12:51:59.543268 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:51:59 crc kubenswrapper[4868]: I1003 12:51:59.543280 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:51:59 crc kubenswrapper[4868]: E1003 12:51:59.543563 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:51:59 crc kubenswrapper[4868]: E1003 12:51:59.543723 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:00 crc kubenswrapper[4868]: I1003 12:52:00.543516 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:00 crc kubenswrapper[4868]: I1003 12:52:00.543588 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:00 crc kubenswrapper[4868]: E1003 12:52:00.543641 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:00 crc kubenswrapper[4868]: E1003 12:52:00.543736 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:01 crc kubenswrapper[4868]: I1003 12:52:01.542986 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:01 crc kubenswrapper[4868]: E1003 12:52:01.543147 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:01 crc kubenswrapper[4868]: I1003 12:52:01.543214 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:01 crc kubenswrapper[4868]: E1003 12:52:01.543434 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:02 crc kubenswrapper[4868]: I1003 12:52:02.544071 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:02 crc kubenswrapper[4868]: E1003 12:52:02.544223 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:02 crc kubenswrapper[4868]: I1003 12:52:02.544338 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:02 crc kubenswrapper[4868]: E1003 12:52:02.544465 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:03 crc kubenswrapper[4868]: I1003 12:52:03.543475 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:03 crc kubenswrapper[4868]: I1003 12:52:03.543472 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:03 crc kubenswrapper[4868]: E1003 12:52:03.543617 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:03 crc kubenswrapper[4868]: E1003 12:52:03.543693 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:04 crc kubenswrapper[4868]: E1003 12:52:04.535212 4868 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Oct 03 12:52:04 crc kubenswrapper[4868]: I1003 12:52:04.543108 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:04 crc kubenswrapper[4868]: I1003 12:52:04.544185 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:04 crc kubenswrapper[4868]: E1003 12:52:04.544289 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:04 crc kubenswrapper[4868]: E1003 12:52:04.544173 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:04 crc kubenswrapper[4868]: I1003 12:52:04.544987 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 12:52:04 crc kubenswrapper[4868]: E1003 12:52:04.545272 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-fgxcz_openshift-ovn-kubernetes(46b5db5d-3104-43ab-9ae7-080ec1f50ca9)\"" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" Oct 03 12:52:04 crc kubenswrapper[4868]: E1003 12:52:04.653485 4868 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 12:52:05 crc kubenswrapper[4868]: I1003 12:52:05.543855 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:05 crc kubenswrapper[4868]: I1003 12:52:05.543855 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:05 crc kubenswrapper[4868]: E1003 12:52:05.544097 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:05 crc kubenswrapper[4868]: E1003 12:52:05.544341 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:06 crc kubenswrapper[4868]: I1003 12:52:06.543702 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:06 crc kubenswrapper[4868]: I1003 12:52:06.543737 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:06 crc kubenswrapper[4868]: E1003 12:52:06.543870 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:06 crc kubenswrapper[4868]: E1003 12:52:06.544006 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:07 crc kubenswrapper[4868]: I1003 12:52:07.543431 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:07 crc kubenswrapper[4868]: I1003 12:52:07.543468 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:07 crc kubenswrapper[4868]: E1003 12:52:07.543613 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:07 crc kubenswrapper[4868]: E1003 12:52:07.543754 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:08 crc kubenswrapper[4868]: I1003 12:52:08.543418 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:08 crc kubenswrapper[4868]: I1003 12:52:08.543429 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:08 crc kubenswrapper[4868]: E1003 12:52:08.543550 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:08 crc kubenswrapper[4868]: E1003 12:52:08.543616 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:09 crc kubenswrapper[4868]: I1003 12:52:09.543920 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:09 crc kubenswrapper[4868]: I1003 12:52:09.543924 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:09 crc kubenswrapper[4868]: E1003 12:52:09.544332 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:09 crc kubenswrapper[4868]: E1003 12:52:09.544493 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:09 crc kubenswrapper[4868]: E1003 12:52:09.654553 4868 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 12:52:10 crc kubenswrapper[4868]: I1003 12:52:10.543942 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:10 crc kubenswrapper[4868]: I1003 12:52:10.543960 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:10 crc kubenswrapper[4868]: E1003 12:52:10.544116 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:10 crc kubenswrapper[4868]: E1003 12:52:10.544159 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:11 crc kubenswrapper[4868]: I1003 12:52:11.543567 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:11 crc kubenswrapper[4868]: I1003 12:52:11.543638 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:11 crc kubenswrapper[4868]: E1003 12:52:11.543709 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:11 crc kubenswrapper[4868]: E1003 12:52:11.543839 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:12 crc kubenswrapper[4868]: I1003 12:52:12.543821 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:12 crc kubenswrapper[4868]: I1003 12:52:12.543869 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:12 crc kubenswrapper[4868]: E1003 12:52:12.543987 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:12 crc kubenswrapper[4868]: E1003 12:52:12.544096 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:13 crc kubenswrapper[4868]: I1003 12:52:13.543456 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:13 crc kubenswrapper[4868]: E1003 12:52:13.543593 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:13 crc kubenswrapper[4868]: I1003 12:52:13.543636 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:13 crc kubenswrapper[4868]: E1003 12:52:13.543798 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:14 crc kubenswrapper[4868]: I1003 12:52:14.543741 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:14 crc kubenswrapper[4868]: I1003 12:52:14.543801 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:14 crc kubenswrapper[4868]: E1003 12:52:14.544925 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:14 crc kubenswrapper[4868]: E1003 12:52:14.545066 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:14 crc kubenswrapper[4868]: E1003 12:52:14.655079 4868 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 12:52:15 crc kubenswrapper[4868]: I1003 12:52:15.543552 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:15 crc kubenswrapper[4868]: E1003 12:52:15.543705 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:15 crc kubenswrapper[4868]: I1003 12:52:15.543839 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:15 crc kubenswrapper[4868]: E1003 12:52:15.544151 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:16 crc kubenswrapper[4868]: I1003 12:52:16.543508 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:16 crc kubenswrapper[4868]: I1003 12:52:16.543622 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:16 crc kubenswrapper[4868]: E1003 12:52:16.543664 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:16 crc kubenswrapper[4868]: E1003 12:52:16.543799 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:17 crc kubenswrapper[4868]: I1003 12:52:17.543692 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:17 crc kubenswrapper[4868]: E1003 12:52:17.543819 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:17 crc kubenswrapper[4868]: I1003 12:52:17.543864 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:17 crc kubenswrapper[4868]: E1003 12:52:17.543997 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:18 crc kubenswrapper[4868]: I1003 12:52:18.543284 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:18 crc kubenswrapper[4868]: E1003 12:52:18.543533 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:18 crc kubenswrapper[4868]: I1003 12:52:18.543953 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:18 crc kubenswrapper[4868]: E1003 12:52:18.544541 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:18 crc kubenswrapper[4868]: I1003 12:52:18.544827 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.235558 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/3.log" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.239096 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerStarted","Data":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.239696 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.269534 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=28.26950622 podStartE2EDuration="28.26950622s" podCreationTimestamp="2025-10-03 12:51:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:51:54.575128699 +0000 UTC m=+110.784977785" watchObservedRunningTime="2025-10-03 12:52:19.26950622 +0000 UTC m=+135.479355286" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.270261 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podStartSLOduration=114.27025523 podStartE2EDuration="1m54.27025523s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:19.268688298 +0000 UTC m=+135.478537374" watchObservedRunningTime="2025-10-03 12:52:19.27025523 +0000 UTC m=+135.480104306" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.534038 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nwqvb"] Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.534681 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:19 crc kubenswrapper[4868]: E1003 12:52:19.534793 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:19 crc kubenswrapper[4868]: I1003 12:52:19.543906 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:19 crc kubenswrapper[4868]: E1003 12:52:19.544069 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:19 crc kubenswrapper[4868]: E1003 12:52:19.656668 4868 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Oct 03 12:52:20 crc kubenswrapper[4868]: I1003 12:52:20.543334 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:20 crc kubenswrapper[4868]: E1003 12:52:20.543476 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:20 crc kubenswrapper[4868]: I1003 12:52:20.543635 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:20 crc kubenswrapper[4868]: E1003 12:52:20.543685 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:21 crc kubenswrapper[4868]: I1003 12:52:21.543272 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:21 crc kubenswrapper[4868]: I1003 12:52:21.543300 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:21 crc kubenswrapper[4868]: E1003 12:52:21.543434 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:21 crc kubenswrapper[4868]: E1003 12:52:21.543581 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:22 crc kubenswrapper[4868]: I1003 12:52:22.543020 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:22 crc kubenswrapper[4868]: E1003 12:52:22.543174 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:22 crc kubenswrapper[4868]: I1003 12:52:22.543015 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:22 crc kubenswrapper[4868]: E1003 12:52:22.543267 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:23 crc kubenswrapper[4868]: I1003 12:52:23.543706 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:23 crc kubenswrapper[4868]: E1003 12:52:23.543836 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-nwqvb" podUID="4fc2d690-5dcc-4f98-8607-0b3909f44c23" Oct 03 12:52:23 crc kubenswrapper[4868]: I1003 12:52:23.544236 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:23 crc kubenswrapper[4868]: E1003 12:52:23.544284 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Oct 03 12:52:24 crc kubenswrapper[4868]: I1003 12:52:24.543906 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:24 crc kubenswrapper[4868]: I1003 12:52:24.544016 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:24 crc kubenswrapper[4868]: E1003 12:52:24.545003 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Oct 03 12:52:24 crc kubenswrapper[4868]: E1003 12:52:24.545182 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.543185 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.543214 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.546584 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.546587 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.546721 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Oct 03 12:52:25 crc kubenswrapper[4868]: I1003 12:52:25.546634 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Oct 03 12:52:26 crc kubenswrapper[4868]: I1003 12:52:26.543511 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:26 crc kubenswrapper[4868]: I1003 12:52:26.543561 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Oct 03 12:52:26 crc kubenswrapper[4868]: I1003 12:52:26.546105 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Oct 03 12:52:26 crc kubenswrapper[4868]: I1003 12:52:26.546108 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.146112 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.146188 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.568522 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.568616 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.568635 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:52:32 crc kubenswrapper[4868]: E1003 12:52:32.568689 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:54:34.568671367 +0000 UTC m=+270.778520433 (durationBeforeRetry 2m2s). 
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.568734 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.568770 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.569971 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.574018 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.574035 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.582077 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.770274 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.858495 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Oct 03 12:52:32 crc kubenswrapper[4868]: I1003 12:52:32.867812 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Oct 03 12:52:33 crc kubenswrapper[4868]: W1003 12:52:33.086879 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-5f3586bdd13fd99215d67c6e963aab2deceaf308030a34acc236389769ef81f9 WatchSource:0}: Error finding container 5f3586bdd13fd99215d67c6e963aab2deceaf308030a34acc236389769ef81f9: Status 404 returned error can't find the container with id 5f3586bdd13fd99215d67c6e963aab2deceaf308030a34acc236389769ef81f9
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.284401 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"09f9954b7b9a5b5a4119144493ab625be43cfdd736e1099dfe4361b206d03814"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.284489 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"1fe2412cb330fcbf09381765f3228c2fa5d08bdb153088670029f8f2e3ffd1c5"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.287667 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"bfe10b223b47c82f671417d89182c3b8a43302e5246c0cd0f16c2ab5a9a3222b"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.287722 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"5f3586bdd13fd99215d67c6e963aab2deceaf308030a34acc236389769ef81f9"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.288967 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"1aba5525c222a2227c36c234429d43dae14da36748491fe5c7458543c3472261"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.288995 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"e843e25e8e9e084c63c0535ca7e26b733cb1f8d9e926a373fba78f48a086eb03"}
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.289243 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.777823 4868 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.810921 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.811502 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.811934 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.812230 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.812891 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qzzd4"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.813387 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.813709 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.814442 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.819329 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.821862 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.821886 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.822405 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.832319 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.832529 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.832644 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.832721 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.832900 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.833059 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.834339 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.834729 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Oct 03 12:52:33 
crc kubenswrapper[4868]: I1003 12:52:33.835019 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835372 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835460 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835551 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835548 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835624 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835664 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835674 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835676 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835847 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.835850 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.836004 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.836286 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.836324 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.838229 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.839533 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.840424 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.843410 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szgx9"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.843679 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.843751 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.843999 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.844022 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.844151 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.844205 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.844802 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.845235 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5txl8"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.845312 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.845409 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.846111 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.846419 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-z5zzt"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.846718 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tr6fl"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.847051 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.847083 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.847155 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.847287 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-z5zzt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.855944 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.856091 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.856181 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.856360 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.860384 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.861250 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.861298 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.862202 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.862364 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.862831 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.863503 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.863990 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864150 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864151 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864911 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864284 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865035 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865051 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864344 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864469 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864477 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864593 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864637 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865287 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864662 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864681 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865475 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864713 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.864749 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865690 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865709 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.865940 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.866546 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.867436 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.867764 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.869159 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.869311 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.869483 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.869639 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.869794 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.870166 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.870308 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.870535 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.870652 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.870823 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.871033 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.871195 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.871371 4868 reflector.go:368] Caches 
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.871599 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.871633 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.884226 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.887783 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.889481 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.890275 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.890735 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.891682 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.892709 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.892830 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.892912 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.893680 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czpdv"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.893873 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.894166 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.896216 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.896432 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.896649 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.896840 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897005 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897107 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897112 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897216 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897537 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897684 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897797 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897954 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897974 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-955rh"]
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.898622 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r"
Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.898799 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-955rh" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897683 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.900036 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.897747 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.900217 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.900848 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.906767 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.907405 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.907874 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.907885 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.907986 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908088 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908255 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908321 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908378 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908467 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908508 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908654 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908704 4868 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908666 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.908891 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.909130 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.910046 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.910570 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.913419 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.913989 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.914888 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.915599 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.915893 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hpssf"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.916375 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.916592 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.918596 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.919054 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.919363 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.919811 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.920042 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.920201 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.922151 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.922549 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-wwvt7"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.922602 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.922921 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.925640 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.926828 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.927239 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.927322 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.927645 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.929966 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.930200 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.935387 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.937306 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szgx9"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.937667 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.938666 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-dq7lc"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.946285 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.946323 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.946406 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.948597 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.976124 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.982124 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.982200 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.982214 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-775r2"] Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987772 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/587fdffe-2a05-4308-b88b-48837d1f256d-tmpfs\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987842 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5afabba8-1052-41e6-af8f-83da47c0d71a-metrics-tls\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987873 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987900 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fb971e6-b551-4410-aea5-5a8286a03737-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987930 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987957 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hpcqr\" (UniqueName: \"kubernetes.io/projected/38ba9fb1-abc5-45a3-b878-46d32e466672-kube-api-access-hpcqr\") pod \"downloads-7954f5f757-z5zzt\" (UID: \"38ba9fb1-abc5-45a3-b878-46d32e466672\") " pod="openshift-console/downloads-7954f5f757-z5zzt" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.987983 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/24f70e5b-2400-4370-9c85-e41ad7f06e05-metrics-tls\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" Oct 03 12:52:33 crc kubenswrapper[4868]: I1003 12:52:33.988008 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.002902 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-775r2" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:33.988039 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-proxy-tls\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004386 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-key\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004464 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7zvbc\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-kube-api-access-7zvbc\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004506 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-apiservice-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004529 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afabba8-1052-41e6-af8f-83da47c0d71a-trusted-ca\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004551 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pb4zs\" (UniqueName: \"kubernetes.io/projected/8fb971e6-b551-4410-aea5-5a8286a03737-kube-api-access-pb4zs\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004584 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggjt9\" (UniqueName: \"kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004625 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-webhook-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004666 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-cabundle\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004695 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lnh6\" (UniqueName: \"kubernetes.io/projected/24f70e5b-2400-4370-9c85-e41ad7f06e05-kube-api-access-5lnh6\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004721 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004715 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.004743 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005025 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fb971e6-b551-4410-aea5-5a8286a03737-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005141 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzmjz\" (UniqueName: \"kubernetes.io/projected/d31eb7f7-fae9-47b2-a16e-68b08da804bb-kube-api-access-kzmjz\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005252 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-srv-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005308 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-st2hg\" (UniqueName: 
\"kubernetes.io/projected/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-kube-api-access-st2hg\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005363 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005510 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6llw6\" (UniqueName: \"kubernetes.io/projected/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-kube-api-access-6llw6\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005546 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005586 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfrjz\" (UniqueName: \"kubernetes.io/projected/587fdffe-2a05-4308-b88b-48837d1f256d-kube-api-access-lfrjz\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.005631 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-images\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.031386 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.032225 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.035587 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-955rh"] Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.035994 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czpdv"] Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.037271 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.040024 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qzzd4"] 
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.046589 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.047137 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.054380 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.056029 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.060545 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.061025 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.064971 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.065192 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.065023 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.065680 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.067876 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.069539 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.070275 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tr6fl"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.071833 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.073120 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hpssf"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.076187 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.076248 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.076262 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-z5zzt"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.080097 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.081508 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5txl8"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.087975 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktkcc"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.089508 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.091485 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.093318 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.093678 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.100608 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-775r2"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106227 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fb971e6-b551-4410-aea5-5a8286a03737-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106274 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106327 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106354 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hpcqr\" (UniqueName: \"kubernetes.io/projected/38ba9fb1-abc5-45a3-b878-46d32e466672-kube-api-access-hpcqr\") pod \"downloads-7954f5f757-z5zzt\" (UID: \"38ba9fb1-abc5-45a3-b878-46d32e466672\") " pod="openshift-console/downloads-7954f5f757-z5zzt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106377 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/24f70e5b-2400-4370-9c85-e41ad7f06e05-metrics-tls\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106399 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106432 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-proxy-tls\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106455 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-key\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106490 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7zvbc\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-kube-api-access-7zvbc\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106514 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-apiservice-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106535 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pb4zs\" (UniqueName: \"kubernetes.io/projected/8fb971e6-b551-4410-aea5-5a8286a03737-kube-api-access-pb4zs\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106557 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afabba8-1052-41e6-af8f-83da47c0d71a-trusted-ca\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106578 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggjt9\" (UniqueName: \"kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106598 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-webhook-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106616 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-cabundle\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106655 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lnh6\" (UniqueName: \"kubernetes.io/projected/24f70e5b-2400-4370-9c85-e41ad7f06e05-kube-api-access-5lnh6\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106676 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106697 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106717 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fb971e6-b551-4410-aea5-5a8286a03737-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106739 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzmjz\" (UniqueName: \"kubernetes.io/projected/d31eb7f7-fae9-47b2-a16e-68b08da804bb-kube-api-access-kzmjz\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106766 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-st2hg\" (UniqueName: \"kubernetes.io/projected/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-kube-api-access-st2hg\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106787 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106807 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-srv-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106828 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6llw6\" (UniqueName: \"kubernetes.io/projected/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-kube-api-access-6llw6\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106848 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106867 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfrjz\" (UniqueName: \"kubernetes.io/projected/587fdffe-2a05-4308-b88b-48837d1f256d-kube-api-access-lfrjz\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106884 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-images\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106902 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/587fdffe-2a05-4308-b88b-48837d1f256d-tmpfs\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.106945 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5afabba8-1052-41e6-af8f-83da47c0d71a-metrics-tls\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.108210 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.109652 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.109756 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8fb971e6-b551-4410-aea5-5a8286a03737-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.110033 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-auth-proxy-config\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.110242 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.110680 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/587fdffe-2a05-4308-b88b-48837d1f256d-tmpfs\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.111420 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.111703 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5afabba8-1052-41e6-af8f-83da47c0d71a-trusted-ca\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.112815 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/24f70e5b-2400-4370-9c85-e41ad7f06e05-metrics-tls\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.113910 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/5afabba8-1052-41e6-af8f-83da47c0d71a-metrics-tls\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.114534 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.115819 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.116634 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.117821 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dq7lc"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.119344 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktkcc"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.122993 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-vbs4j"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.123757 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-cxlqt"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.124731 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cxlqt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.124720 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vbs4j"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.125553 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vbs4j"]
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.127518 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.128324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8fb971e6-b551-4410-aea5-5a8286a03737-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.134742 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.156096 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.174697 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.193973 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.214125 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.240263 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.253898 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.273612 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.281636 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-images\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.294123 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.313746 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.322397 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-proxy-tls\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.335151 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.360002 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.374416 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.381729 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-webhook-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.384397 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/587fdffe-2a05-4308-b88b-48837d1f256d-apiservice-cert\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.393703 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.415378 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.433526 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.443537 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-profile-collector-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.453891 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.463254 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-srv-cert\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.474443 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.494024 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.520986 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.533782 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.553769 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.574436 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.593716 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.614348 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.633405 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.643914 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-key\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.653201 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.658748 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/d31eb7f7-fae9-47b2-a16e-68b08da804bb-signing-cabundle\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.674249 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.713772 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.734888 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.753984 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.774247 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.813637 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.834518 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.854497 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.874894 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.893524 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.915373 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.931862 4868 request.go:700] Waited for 1.011801413s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-cluster-samples-operator/configmaps?fieldSelector=metadata.name%3Dopenshift-service-ca.crt&limit=500&resourceVersion=0
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.934569 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.953780 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.975243 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Oct 03 12:52:34 crc kubenswrapper[4868]: I1003 12:52:34.995241 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.014233 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.033918 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.053865 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.073908 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.093743 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.114318 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.134144 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.153431 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.173660 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.193775 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.214019 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.244916 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.255378 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.274026 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.294362 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.313748 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.334495 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.354175 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.374846 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.394619 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.414655 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.434402 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.453906 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.473916 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.493591 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.515155 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.535099 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.554303 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.574511 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.600825 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.614135 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.634192 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.654445 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.674030 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.694926 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.714716 4868 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.749672 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-st2hg\" (UniqueName: \"kubernetes.io/projected/1ad1cf81-62ab-48c3-af04-6dd5b63b46dd-kube-api-access-st2hg\") pod \"catalog-operator-68c6474976-4zh86\" (UID: \"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.770483 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lnh6\" (UniqueName: \"kubernetes.io/projected/24f70e5b-2400-4370-9c85-e41ad7f06e05-kube-api-access-5lnh6\") pod \"dns-operator-744455d44c-5txl8\" (UID: \"24f70e5b-2400-4370-9c85-e41ad7f06e05\") " pod="openshift-dns-operator/dns-operator-744455d44c-5txl8"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.790171 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-bound-sa-token\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.809452 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzmjz\" (UniqueName: \"kubernetes.io/projected/d31eb7f7-fae9-47b2-a16e-68b08da804bb-kube-api-access-kzmjz\") pod \"service-ca-9c57cc56f-955rh\" (UID: \"d31eb7f7-fae9-47b2-a16e-68b08da804bb\") " pod="openshift-service-ca/service-ca-9c57cc56f-955rh"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.829822 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfrjz\" (UniqueName: \"kubernetes.io/projected/587fdffe-2a05-4308-b88b-48837d1f256d-kube-api-access-lfrjz\") pod \"packageserver-d55dfcdfc-x5wxv\" (UID: \"587fdffe-2a05-4308-b88b-48837d1f256d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.849989 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7zvbc\" (UniqueName: \"kubernetes.io/projected/5afabba8-1052-41e6-af8f-83da47c0d71a-kube-api-access-7zvbc\") pod \"ingress-operator-5b745b69d9-k8ftf\" (UID: \"5afabba8-1052-41e6-af8f-83da47c0d71a\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.875897 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pb4zs\" (UniqueName: \"kubernetes.io/projected/8fb971e6-b551-4410-aea5-5a8286a03737-kube-api-access-pb4zs\") pod \"openshift-controller-manager-operator-756b6f6bc6-dpnf7\" (UID: \"8fb971e6-b551-4410-aea5-5a8286a03737\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"
Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.876147 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.888090 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggjt9\" (UniqueName: \"kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9\") pod \"controller-manager-879f6c89f-6fp9h\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.911182 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6llw6\" (UniqueName: \"kubernetes.io/projected/b0ce17a1-24a9-4a8a-93ef-adf14c62816f-kube-api-access-6llw6\") pod \"machine-config-operator-74547568cd-x6p6m\" (UID: \"b0ce17a1-24a9-4a8a-93ef-adf14c62816f\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.931897 4868 request.go:700] Waited for 1.806731295s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/secrets?fieldSelector=metadata.name%3Dmachine-config-server-tls&limit=500&resourceVersion=0 Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.933684 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hpcqr\" (UniqueName: \"kubernetes.io/projected/38ba9fb1-abc5-45a3-b878-46d32e466672-kube-api-access-hpcqr\") pod \"downloads-7954f5f757-z5zzt\" (UID: \"38ba9fb1-abc5-45a3-b878-46d32e466672\") " pod="openshift-console/downloads-7954f5f757-z5zzt" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.933832 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.954747 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.975005 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.990382 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:35 crc kubenswrapper[4868]: I1003 12:52:35.994294 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.002410 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.014557 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.017875 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.031646 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.033231 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-5txl8"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.033877 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.040229 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" Oct 03 12:52:36 crc kubenswrapper[4868]: W1003 12:52:36.047781 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod24f70e5b_2400_4370_9c85_e41ad7f06e05.slice/crio-d679c6dc112b14f53bc844a63fd76131624c42c02c353c4ebf5719d9d51cbdfd WatchSource:0}: Error finding container d679c6dc112b14f53bc844a63fd76131624c42c02c353c4ebf5719d9d51cbdfd: Status 404 returned error can't find the container with id d679c6dc112b14f53bc844a63fd76131624c42c02c353c4ebf5719d9d51cbdfd Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.053655 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.060511 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-955rh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.104695 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126145 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126229 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126253 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-service-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126294 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-trusted-ca\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") 
" pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126386 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126406 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126656 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-encryption-config\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126676 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g9hvr\" (UniqueName: \"kubernetes.io/projected/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-kube-api-access-g9hvr\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126717 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-config\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126740 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crp25\" (UniqueName: \"kubernetes.io/projected/1c441e29-000c-4055-b932-6d70f2f4d82e-kube-api-access-crp25\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126766 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126811 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126833 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126854 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126869 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h4jr2\" (UniqueName: \"kubernetes.io/projected/000a8020-feb7-4a25-95a7-a13fdfa39109-kube-api-access-h4jr2\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126888 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126904 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126923 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/997a6d06-ce81-4866-9055-04d6ff8c635f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126940 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126960 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-config\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.126985 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127003 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5fkjq\" (UniqueName: \"kubernetes.io/projected/25d8d7db-4938-4621-880a-033692278381-kube-api-access-5fkjq\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127021 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-config\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127038 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127068 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45f565d0-329e-4a36-9138-91fa8da747fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127089 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f565d0-329e-4a36-9138-91fa8da747fa-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127107 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5chl\" (UniqueName: \"kubernetes.io/projected/45f565d0-329e-4a36-9138-91fa8da747fa-kube-api-access-j5chl\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 
12:52:36.127127 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127152 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjzsk\" (UniqueName: \"kubernetes.io/projected/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-kube-api-access-hjzsk\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127175 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127197 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127228 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/25d8d7db-4938-4621-880a-033692278381-machine-approver-tls\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127286 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-config\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.127303 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:36.627288857 +0000 UTC m=+152.837137913 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127341 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127368 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127442 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c441e29-000c-4055-b932-6d70f2f4d82e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127461 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-service-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127485 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127499 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mk88n\" (UniqueName: \"kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127528 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 
12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127545 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127560 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-etcd-client\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127578 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4538295a-b047-4bba-999b-9d35082ad0c5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127598 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-client\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127732 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-serving-cert\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127814 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/997a6d06-ce81-4866-9055-04d6ff8c635f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127846 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4td7h\" (UniqueName: \"kubernetes.io/projected/b8395931-a0eb-4b94-a5e8-f58c3aea5992-kube-api-access-4td7h\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127891 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-config\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc 
kubenswrapper[4868]: I1003 12:52:36.127921 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-client\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127963 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwrg7\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.127989 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bcwf\" (UniqueName: \"kubernetes.io/projected/4538295a-b047-4bba-999b-9d35082ad0c5-kube-api-access-2bcwf\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128012 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128041 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128105 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-serving-cert\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128135 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128155 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxqd9\" (UniqueName: \"kubernetes.io/projected/0c5660b3-e155-4397-89c5-50fd4bb54dba-kube-api-access-zxqd9\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: 
I1003 12:52:36.128186 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-audit-policies\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128212 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128235 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8395931-a0eb-4b94-a5e8-f58c3aea5992-serving-cert\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128299 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128324 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e70f3d2-5b07-494a-8edc-dcc8e541752e-serving-cert\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128367 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-audit\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128391 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-image-import-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128419 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128439 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128465 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-serving-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128486 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-config\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128515 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-serving-cert\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128545 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-audit-dir\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128587 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c441e29-000c-4055-b932-6d70f2f4d82e-proxy-tls\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128624 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128646 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128664 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: 
\"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128690 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128755 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5ac2a283-f340-4724-8304-86142053130b-audit-dir\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128776 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-images\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128818 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kh78\" (UniqueName: \"kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128848 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128872 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128891 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-serving-cert\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.128920 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-encryption-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " 
pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129038 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129089 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129135 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-node-pullsecrets\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129153 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-auth-proxy-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129247 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lz69r\" (UniqueName: \"kubernetes.io/projected/5e70f3d2-5b07-494a-8edc-dcc8e541752e-kube-api-access-lz69r\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129264 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smch7\" (UniqueName: \"kubernetes.io/projected/5ac2a283-f340-4724-8304-86142053130b-kube-api-access-smch7\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.129281 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/997a6d06-ce81-4866-9055-04d6ff8c635f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.194477 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-z5zzt"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.201946 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"]
Oct 03 12:52:36 crc kubenswrapper[4868]: W1003 12:52:36.215235 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod240cf9f9_30a6_46ed_bde7_02cdd3fe51cf.slice/crio-baf097e1ecc3f345daa32087d805c113ea44ecb8694dace0970a07ca8380b65b WatchSource:0}: Error finding container baf097e1ecc3f345daa32087d805c113ea44ecb8694dace0970a07ca8380b65b: Status 404 returned error can't find the container with id baf097e1ecc3f345daa32087d805c113ea44ecb8694dace0970a07ca8380b65b
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.227921 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf"]
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.230783 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231105 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5chl\" (UniqueName: \"kubernetes.io/projected/45f565d0-329e-4a36-9138-91fa8da747fa-kube-api-access-j5chl\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231228 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231264 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvd99\" (UniqueName: \"kubernetes.io/projected/89de9048-c2c6-4caf-9ed2-17eee2161f84-kube-api-access-jvd99\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231293 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjzsk\" (UniqueName: \"kubernetes.io/projected/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-kube-api-access-hjzsk\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231320 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231344 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8586186d-7b44-4899-8eae-82c717ea38f5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231369 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/25d8d7db-4938-4621-880a-033692278381-machine-approver-tls\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231395 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-config\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231421 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231461 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231483 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231512 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231532 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.231997 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:36.731968331 +0000 UTC m=+152.941817397 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.231556 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9krfm\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-kube-api-access-9krfm\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232230 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c441e29-000c-4055-b932-6d70f2f4d82e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232267 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-service-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232337 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232364 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232388 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mk88n\" (UniqueName: \"kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232419 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ch7r\" (UniqueName: \"kubernetes.io/projected/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-kube-api-access-8ch7r\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.232445 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-srv-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.234887 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4538295a-b047-4bba-999b-9d35082ad0c5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.234937 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-client\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.234964 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zgq2k\" (UniqueName: \"kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.235000 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.235028 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-etcd-client\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.235108 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hlf5l\" (UniqueName: \"kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.236591 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-serving-cert\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.236631 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4td7h\" (UniqueName: \"kubernetes.io/projected/b8395931-a0eb-4b94-a5e8-f58c3aea5992-kube-api-access-4td7h\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.236658 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/997a6d06-ce81-4866-9055-04d6ff8c635f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.236687 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-config\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.236800 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-client\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.241894 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.242913 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243037 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243164 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwrg7\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243189 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c441e29-000c-4055-b932-6d70f2f4d82e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243202 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bcwf\" (UniqueName: \"kubernetes.io/projected/4538295a-b047-4bba-999b-9d35082ad0c5-kube-api-access-2bcwf\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243247 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243277 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6m5p\" (UniqueName: \"kubernetes.io/projected/8586186d-7b44-4899-8eae-82c717ea38f5-kube-api-access-j6m5p\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.243937 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.244656 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-service-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.246159 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.246291 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-trusted-ca-bundle\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.247133 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.248588 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-serving-cert\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.248643 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.248804 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.251325 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-config\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.252825 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/997a6d06-ce81-4866-9055-04d6ff8c635f-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.253333 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxqd9\" (UniqueName: \"kubernetes.io/projected/0c5660b3-e155-4397-89c5-50fd4bb54dba-kube-api-access-zxqd9\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.253706 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-registration-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.253961 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8395931-a0eb-4b94-a5e8-f58c3aea5992-serving-cert\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.253992 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-metrics-certs\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.259606 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.259798 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-audit-policies\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.260351 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.260378 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.261753 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-config\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.261932 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.262585 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5ac2a283-f340-4724-8304-86142053130b-audit-policies\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.262808 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.262861 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-client\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.262884 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e70f3d2-5b07-494a-8edc-dcc8e541752e-serving-cert\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.262938 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.263507 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-client\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.263939 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5a59f7f-625d-436b-98ab-a46920a34fe8-serving-cert\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.264354 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-audit\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.265341 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-audit\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.268941 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-serving-cert\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.270862 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.272018 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5e70f3d2-5b07-494a-8edc-dcc8e541752e-serving-cert\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.275146 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.278185 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/4538295a-b047-4bba-999b-9d35082ad0c5-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.279964 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-plugins-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280036 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-image-import-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280089 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxz89\" (UniqueName: \"kubernetes.io/projected/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-kube-api-access-zxz89\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280118 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-stats-auth\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280145 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280473 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280504 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-etcd-client\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280526 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-serving-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280549 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-config\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280886 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-serving-cert\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.280933 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-audit-dir\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281045 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-audit-dir\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281077 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ec322eb-b57f-4ba6-a635-b023501af402-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281120 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281133 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-etcd-serving-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281163 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281522 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c441e29-000c-4055-b932-6d70f2f4d82e-proxy-tls\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281559 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281589 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281614 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr7zg\" (UniqueName: \"kubernetes.io/projected/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-kube-api-access-sr7zg\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281650 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.281683 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smnpr\" (UniqueName: \"kubernetes.io/projected/3ec322eb-b57f-4ba6-a635-b023501af402-kube-api-access-smnpr\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.284372 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/0c5660b3-e155-4397-89c5-50fd4bb54dba-etcd-ca\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.285515 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.285875 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286107 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5ac2a283-f340-4724-8304-86142053130b-audit-dir\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286170 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-images\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286263 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kh78\" (UniqueName: \"kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286306 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmm5q\" (UniqueName: \"kubernetes.io/projected/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-kube-api-access-wmm5q\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286370 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286734 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0c5660b3-e155-4397-89c5-50fd4bb54dba-serving-cert\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287079 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/4538295a-b047-4bba-999b-9d35082ad0c5-images\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.286256 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c441e29-000c-4055-b932-6d70f2f4d82e-proxy-tls\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287145 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-serving-cert\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287233 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-image-import-ca\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287559 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8395931-a0eb-4b94-a5e8-f58c3aea5992-serving-cert\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287705 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.287737 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/25d8d7db-4938-4621-880a-033692278381-machine-approver-tls\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.288046 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5chl\" (UniqueName: \"kubernetes.io/projected/45f565d0-329e-4a36-9138-91fa8da747fa-kube-api-access-j5chl\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.289668 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjzsk\" (UniqueName: \"kubernetes.io/projected/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-kube-api-access-hjzsk\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.289987 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.290465 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.290636 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.290680 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-serving-cert\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.291574 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.291751 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-encryption-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.291860 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twgmg\" (UniqueName: \"kubernetes.io/projected/e5a59f7f-625d-436b-98ab-a46920a34fe8-kube-api-access-twgmg\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292002 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292033 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292174 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292348 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vs4f5\" (UniqueName: \"kubernetes.io/projected/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-kube-api-access-vs4f5\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292412 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-node-pullsecrets\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292473 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-auth-proxy-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292510 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wgdw5\" (UniqueName: \"kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292575 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-config-volume\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292606 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lz69r\" (UniqueName: \"kubernetes.io/projected/5e70f3d2-5b07-494a-8edc-dcc8e541752e-kube-api-access-lz69r\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292652 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smch7\" (UniqueName: \"kubernetes.io/projected/5ac2a283-f340-4724-8304-86142053130b-kube-api-access-smch7\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/997a6d06-ce81-4866-9055-04d6ff8c635f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292714 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.292849 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.294273 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.294345 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5ac2a283-f340-4724-8304-86142053130b-audit-dir\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.296031 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/000a8020-feb7-4a25-95a7-a13fdfa39109-node-pullsecrets\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.297870 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-auth-proxy-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.298009 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-mountpoint-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.298860 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/000a8020-feb7-4a25-95a7-a13fdfa39109-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.300701 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.301594 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2677264c-dfc3-41b9-af63-bcaab6205cdd-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.301766 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.301908 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-socket-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.301922 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-encryption-config\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.301969 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302024 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-service-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302080 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-trusted-ca\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302340 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302378 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302477 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302534 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302573 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6n7zj\" (UniqueName: \"kubernetes.io/projected/2c0fb16a-559e-47e9-98f3-54563b3bddf4-kube-api-access-6n7zj\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302604 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-metrics-tls\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302704 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-encryption-config\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302732 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g9hvr\" (UniqueName: \"kubernetes.io/projected/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-kube-api-access-g9hvr\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302774 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302800 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-node-bootstrap-token\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.302897 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-config\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.303247 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-config\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.303310 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crp25\" (UniqueName: \"kubernetes.io/projected/1c441e29-000c-4055-b932-6d70f2f4d82e-kube-api-access-crp25\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.309440 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/3e67dcd0-021f-4c7e-aaa5-351df73f9f2a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-8ftk2\" (UID: \"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.309707 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.309740 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-csi-data-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.309801 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.312706 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.313007 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-service-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.313246 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.309826 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.314190 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/000a8020-feb7-4a25-95a7-a13fdfa39109-serving-cert\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4"
Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.314561 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-config\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") "
pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.314758 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315361 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e70f3d2-5b07-494a-8edc-dcc8e541752e-trusted-ca\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315449 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315593 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315593 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315664 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315698 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h4jr2\" (UniqueName: \"kubernetes.io/projected/000a8020-feb7-4a25-95a7-a13fdfa39109-kube-api-access-h4jr2\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315816 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k58hf\" (UniqueName: \"kubernetes.io/projected/ba635e01-f221-477f-b2d0-ada4a6473f58-kube-api-access-k58hf\") pod \"migrator-59844c95c7-dzlbk\" (UID: \"ba635e01-f221-477f-b2d0-ada4a6473f58\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.315862 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316023 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316094 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/5ac2a283-f340-4724-8304-86142053130b-encryption-config\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316106 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/25d8d7db-4938-4621-880a-033692278381-config\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316652 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316679 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/997a6d06-ce81-4866-9055-04d6ff8c635f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316721 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316820 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5a59f7f-625d-436b-98ab-a46920a34fe8-config\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316870 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2677264c-dfc3-41b9-af63-bcaab6205cdd-image-registry-operator-tls\") pod 
\"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.316917 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-config\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.317210 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318072 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-certs\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318216 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318300 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-default-certificate\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318335 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5fkjq\" (UniqueName: \"kubernetes.io/projected/25d8d7db-4938-4621-880a-033692278381-kube-api-access-5fkjq\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318363 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-cert\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318387 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318416 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-config\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318440 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318474 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45f565d0-329e-4a36-9138-91fa8da747fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318507 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318534 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx9nq\" (UniqueName: \"kubernetes.io/projected/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-kube-api-access-sx9nq\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318577 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f565d0-329e-4a36-9138-91fa8da747fa-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318603 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c0fb16a-559e-47e9-98f3-54563b3bddf4-service-ca-bundle\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318691 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.318929 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" event={"ID":"24f70e5b-2400-4370-9c85-e41ad7f06e05","Type":"ContainerStarted","Data":"d679c6dc112b14f53bc844a63fd76131624c42c02c353c4ebf5719d9d51cbdfd"} Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.319105 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8395931-a0eb-4b94-a5e8-f58c3aea5992-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.319505 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/997a6d06-ce81-4866-9055-04d6ff8c635f-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.320438 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.320553 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-config\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.320799 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:36.820785642 +0000 UTC m=+153.030634708 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.321145 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-config\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.321762 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/45f565d0-329e-4a36-9138-91fa8da747fa-config\") pod \"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.322393 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" event={"ID":"5afabba8-1052-41e6-af8f-83da47c0d71a","Type":"ContainerStarted","Data":"807cf03d879671239efb05652618e46663fbdc32a643404f2e7f8b00205bae5e"} Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.322610 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mk88n\" (UniqueName: \"kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.323177 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.325652 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" event={"ID":"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf","Type":"ContainerStarted","Data":"baf097e1ecc3f345daa32087d805c113ea44ecb8694dace0970a07ca8380b65b"} Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.327955 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-cdsl9\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.328660 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/45f565d0-329e-4a36-9138-91fa8da747fa-serving-cert\") pod 
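The E1003/Error record above shows the first mount attempt for the image-registry PVC failing because the kubevirt.io.hostpath-provisioner CSI driver had not yet registered with this kubelet; registration happens node-locally over the plugin-registration socket once the csi-hostpathplugin-ktkcc pod being assembled in the surrounding records comes up. As a rough cluster-level check, a minimal client-go sketch (assuming a kubeconfig at the default path) that lists storage.k8s.io/v1 CSIDriver objects; note this approximates, but is not identical to, the kubelet's node-local "list of registered CSI drivers":

// csidrivers.go: diagnostic sketch, not part of kubelet.
package main

import (
	"context"
	"fmt"
	"log"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Assumes ~/.kube/config; swap in rest.InClusterConfig() for in-cluster use.
	config, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
	if err != nil {
		log.Fatal(err)
	}
	clientset, err := kubernetes.NewForConfig(config)
	if err != nil {
		log.Fatal(err)
	}
	// List the CSIDriver API objects the control plane currently knows about.
	drivers, err := clientset.StorageV1().CSIDrivers().List(context.TODO(), metav1.ListOptions{})
	if err != nil {
		log.Fatal(err)
	}
	for _, d := range drivers.Items {
		fmt.Println(d.Name) // e.g. kubevirt.io.hostpath-provisioner, once registered
	}
}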
\"openshift-apiserver-operator-796bbdcf4f-wt988\" (UID: \"45f565d0-329e-4a36-9138-91fa8da747fa\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.332026 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4td7h\" (UniqueName: \"kubernetes.io/projected/b8395931-a0eb-4b94-a5e8-f58c3aea5992-kube-api-access-4td7h\") pod \"authentication-operator-69f744f599-szgx9\" (UID: \"b8395931-a0eb-4b94-a5e8-f58c3aea5992\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.332297 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.350190 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.352806 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwrg7\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.371356 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bcwf\" (UniqueName: \"kubernetes.io/projected/4538295a-b047-4bba-999b-9d35082ad0c5-kube-api-access-2bcwf\") pod \"machine-api-operator-5694c8668f-pkh4z\" (UID: \"4538295a-b047-4bba-999b-9d35082ad0c5\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.413764 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419522 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419775 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wgdw5\" (UniqueName: \"kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419799 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-config-volume\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419827 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419848 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-mountpoint-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419865 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2677264c-dfc3-41b9-af63-bcaab6205cdd-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419887 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419907 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-socket-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419922 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419938 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6n7zj\" (UniqueName: \"kubernetes.io/projected/2c0fb16a-559e-47e9-98f3-54563b3bddf4-kube-api-access-6n7zj\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419955 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-metrics-tls\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419975 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.419991 4868 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420027 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-node-bootstrap-token\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420085 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420100 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-csi-data-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420124 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k58hf\" (UniqueName: \"kubernetes.io/projected/ba635e01-f221-477f-b2d0-ada4a6473f58-kube-api-access-k58hf\") pod \"migrator-59844c95c7-dzlbk\" (UID: \"ba635e01-f221-477f-b2d0-ada4a6473f58\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420149 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5a59f7f-625d-436b-98ab-a46920a34fe8-config\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420163 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2677264c-dfc3-41b9-af63-bcaab6205cdd-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420182 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-certs\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420211 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: 
\"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-default-certificate\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420243 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-cert\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420262 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420281 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420300 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c0fb16a-559e-47e9-98f3-54563b3bddf4-service-ca-bundle\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420317 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx9nq\" (UniqueName: \"kubernetes.io/projected/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-kube-api-access-sx9nq\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420335 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvd99\" (UniqueName: \"kubernetes.io/projected/89de9048-c2c6-4caf-9ed2-17eee2161f84-kube-api-access-jvd99\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420359 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8586186d-7b44-4899-8eae-82c717ea38f5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420380 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " 
pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420399 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420415 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9krfm\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-kube-api-access-9krfm\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420453 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ch7r\" (UniqueName: \"kubernetes.io/projected/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-kube-api-access-8ch7r\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420470 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-srv-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420489 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zgq2k\" (UniqueName: \"kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420509 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hlf5l\" (UniqueName: \"kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420541 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420559 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6m5p\" (UniqueName: \"kubernetes.io/projected/8586186d-7b44-4899-8eae-82c717ea38f5-kube-api-access-j6m5p\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420589 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-registration-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420613 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-metrics-certs\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.420686 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:36.920650678 +0000 UTC m=+153.130499744 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420798 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420828 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5a59f7f-625d-436b-98ab-a46920a34fe8-serving-cert\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420853 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-plugins-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420896 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxz89\" (UniqueName: \"kubernetes.io/projected/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-kube-api-access-zxz89\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420925 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-stats-auth\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " 
pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420956 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ec322eb-b57f-4ba6-a635-b023501af402-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.420979 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421019 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr7zg\" (UniqueName: \"kubernetes.io/projected/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-kube-api-access-sr7zg\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421087 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smnpr\" (UniqueName: \"kubernetes.io/projected/3ec322eb-b57f-4ba6-a635-b023501af402-kube-api-access-smnpr\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421130 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmm5q\" (UniqueName: \"kubernetes.io/projected/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-kube-api-access-wmm5q\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421160 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421187 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twgmg\" (UniqueName: \"kubernetes.io/projected/e5a59f7f-625d-436b-98ab-a46920a34fe8-kube-api-access-twgmg\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421216 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " 
pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.421240 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vs4f5\" (UniqueName: \"kubernetes.io/projected/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-kube-api-access-vs4f5\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.422790 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-config-volume\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.423369 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.424512 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-socket-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.424567 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.424629 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-mountpoint-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.424924 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-plugins-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.425166 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2c0fb16a-559e-47e9-98f3-54563b3bddf4-service-ca-bundle\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.425409 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.426020 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-registration-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.426203 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/89de9048-c2c6-4caf-9ed2-17eee2161f84-csi-data-dir\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.426584 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxqd9\" (UniqueName: \"kubernetes.io/projected/0c5660b3-e155-4397-89c5-50fd4bb54dba-kube-api-access-zxqd9\") pod \"etcd-operator-b45778765-tr6fl\" (UID: \"0c5660b3-e155-4397-89c5-50fd4bb54dba\") " pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.426630 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.426925 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2677264c-dfc3-41b9-af63-bcaab6205cdd-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.427014 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.427053 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e5a59f7f-625d-436b-98ab-a46920a34fe8-config\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.431659 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.432391 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle\") pod 
\"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.435103 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e5a59f7f-625d-436b-98ab-a46920a34fe8-serving-cert\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.435533 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/8586186d-7b44-4899-8eae-82c717ea38f5-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.436787 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.437761 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.437834 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.438332 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/2677264c-dfc3-41b9-af63-bcaab6205cdd-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.438461 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-cert\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.438561 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-metrics-certs\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.438965 4868 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.439214 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-profile-collector-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.439261 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-stats-auth\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.440133 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/3ec322eb-b57f-4ba6-a635-b023501af402-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.442682 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-node-bootstrap-token\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.446137 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.448536 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/2c0fb16a-559e-47e9-98f3-54563b3bddf4-default-certificate\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.449692 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-srv-cert\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.450109 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.450688 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-certs\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.451112 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-metrics-tls\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:36 crc kubenswrapper[4868]: W1003 12:52:36.461436 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8fb971e6_b551_4410_aea5_5a8286a03737.slice/crio-5c8e3c8d4b28cfd4304506f041d7d8432161f8515f15d068d3082086124200f2 WatchSource:0}: Error finding container 5c8e3c8d4b28cfd4304506f041d7d8432161f8515f15d068d3082086124200f2: Status 404 returned error can't find the container with id 5c8e3c8d4b28cfd4304506f041d7d8432161f8515f15d068d3082086124200f2 Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.461565 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.462168 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/24fe616c-6ca3-4bc7-ae8d-4fca938d21fe-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-bgndt\" (UID: \"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.462744 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.473344 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kh78\" (UniqueName: \"kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78\") pod \"route-controller-manager-6576b87f9c-rql9x\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.478753 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-z5zzt"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.499171 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/997a6d06-ce81-4866-9055-04d6ff8c635f-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-vtsbm\" (UID: \"997a6d06-ce81-4866-9055-04d6ff8c635f\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 
12:52:36.522263 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.522419 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smch7\" (UniqueName: \"kubernetes.io/projected/5ac2a283-f340-4724-8304-86142053130b-kube-api-access-smch7\") pod \"apiserver-7bbb656c7d-4sjww\" (UID: \"5ac2a283-f340-4724-8304-86142053130b\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.523476 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.023444123 +0000 UTC m=+153.233293189 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.536378 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lz69r\" (UniqueName: \"kubernetes.io/projected/5e70f3d2-5b07-494a-8edc-dcc8e541752e-kube-api-access-lz69r\") pod \"console-operator-58897d9998-czpdv\" (UID: \"5e70f3d2-5b07-494a-8edc-dcc8e541752e\") " pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.540721 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.545327 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.557937 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.560557 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.563718 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g9hvr\" (UniqueName: \"kubernetes.io/projected/9a3325c2-fabf-4db0-8d36-d6b38bc6a399-kube-api-access-g9hvr\") pod \"openshift-config-operator-7777fb866f-qjlm6\" (UID: \"9a3325c2-fabf-4db0-8d36-d6b38bc6a399\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.591000 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.601700 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crp25\" (UniqueName: \"kubernetes.io/projected/1c441e29-000c-4055-b932-6d70f2f4d82e-kube-api-access-crp25\") pod \"machine-config-controller-84d6567774-6j5bh\" (UID: \"1c441e29-000c-4055-b932-6d70f2f4d82e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.608250 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.610676 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.613104 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-955rh"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.615726 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e33637b6-3fe6-44bd-8d23-28d3cc691dcf-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-5j9fw\" (UID: \"e33637b6-3fe6-44bd-8d23-28d3cc691dcf\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.625175 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h4jr2\" (UniqueName: \"kubernetes.io/projected/000a8020-feb7-4a25-95a7-a13fdfa39109-kube-api-access-h4jr2\") pod \"apiserver-76f77b778f-qzzd4\" (UID: \"000a8020-feb7-4a25-95a7-a13fdfa39109\") " pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.625193 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.625466 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.125404674 +0000 UTC m=+153.335253740 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.625798 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.626322 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.126311199 +0000 UTC m=+153.336160265 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.631240 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.643142 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5fkjq\" (UniqueName: \"kubernetes.io/projected/25d8d7db-4938-4621-880a-033692278381-kube-api-access-5fkjq\") pod \"machine-approver-56656f9798-c5q8n\" (UID: \"25d8d7db-4938-4621-880a-033692278381\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.649919 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.681922 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6n7zj\" (UniqueName: \"kubernetes.io/projected/2c0fb16a-559e-47e9-98f3-54563b3bddf4-kube-api-access-6n7zj\") pod \"router-default-5444994796-wwvt7\" (UID: \"2c0fb16a-559e-47e9-98f3-54563b3bddf4\") " pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.698357 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vs4f5\" (UniqueName: \"kubernetes.io/projected/4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8-kube-api-access-vs4f5\") pod \"machine-config-server-cxlqt\" (UID: \"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8\") " pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.725302 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wgdw5\" (UniqueName: \"kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5\") pod \"marketplace-operator-79b997595-qjpnr\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") " pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.728849 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.729227 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.229207927 +0000 UTC m=+153.439056983 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.736169 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.744671 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx9nq\" (UniqueName: \"kubernetes.io/projected/f55f0e23-45aa-4f38-8fe9-125ddaa6ae66-kube-api-access-sx9nq\") pod \"package-server-manager-789f6589d5-6c5lk\" (UID: \"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.745524 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.757733 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.761228 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvd99\" (UniqueName: \"kubernetes.io/projected/89de9048-c2c6-4caf-9ed2-17eee2161f84-kube-api-access-jvd99\") pod \"csi-hostpathplugin-ktkcc\" (UID: \"89de9048-c2c6-4caf-9ed2-17eee2161f84\") " pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.764347 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-szgx9"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.779654 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr7zg\" (UniqueName: \"kubernetes.io/projected/b0eaa220-caf8-40a2-9587-fd4a6bd4f535-kube-api-access-sr7zg\") pod \"kube-storage-version-migrator-operator-b67b599dd-p2w2n\" (UID: \"b0eaa220-caf8-40a2-9587-fd4a6bd4f535\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.814722 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-tr6fl"] Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.829032 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.830091 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.830479 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.33046467 +0000 UTC m=+153.540313746 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.836020 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zgq2k\" (UniqueName: \"kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k\") pod \"collect-profiles-29324925-9r58b\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.837042 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-cxlqt" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.837936 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smnpr\" (UniqueName: \"kubernetes.io/projected/3ec322eb-b57f-4ba6-a635-b023501af402-kube-api-access-smnpr\") pod \"cluster-samples-operator-665b6dd947-gsc8t\" (UID: \"3ec322eb-b57f-4ba6-a635-b023501af402\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.838506 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmm5q\" (UniqueName: \"kubernetes.io/projected/b1cb9fb8-f769-4afa-86f4-6b5e240e92b3-kube-api-access-wmm5q\") pod \"olm-operator-6b444d44fb-w7bhp\" (UID: \"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.849577 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.861870 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hlf5l\" (UniqueName: \"kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l\") pod \"console-f9d7485db-dq7lc\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.879800 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.880017 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.880144 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.899716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6m5p\" (UniqueName: \"kubernetes.io/projected/8586186d-7b44-4899-8eae-82c717ea38f5-kube-api-access-j6m5p\") pod \"multus-admission-controller-857f4d67dd-hpssf\" (UID: \"8586186d-7b44-4899-8eae-82c717ea38f5\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:36 crc kubenswrapper[4868]: W1003 12:52:36.915240 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8395931_a0eb_4b94_a5e8_f58c3aea5992.slice/crio-f1b64db14dfb66dc86b5a2a4f5b37c9fd8ca47bafead0799e255c0e497b7c0df WatchSource:0}: Error finding container f1b64db14dfb66dc86b5a2a4f5b37c9fd8ca47bafead0799e255c0e497b7c0df: Status 404 returned error can't find the container with id f1b64db14dfb66dc86b5a2a4f5b37c9fd8ca47bafead0799e255c0e497b7c0df Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.920735 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.932003 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:36 crc kubenswrapper[4868]: E1003 12:52:36.932673 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.432653608 +0000 UTC m=+153.642502674 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.945857 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k58hf\" (UniqueName: \"kubernetes.io/projected/ba635e01-f221-477f-b2d0-ada4a6473f58-kube-api-access-k58hf\") pod \"migrator-59844c95c7-dzlbk\" (UID: \"ba635e01-f221-477f-b2d0-ada4a6473f58\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" Oct 03 12:52:36 crc kubenswrapper[4868]: W1003 12:52:36.946633 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c5660b3_e155_4397_89c5_50fd4bb54dba.slice/crio-b5a5df0468822d57dfe95c0d7dedaa1b24a0a67960fe5e3f5dfa1072543cb0b9 WatchSource:0}: Error finding container b5a5df0468822d57dfe95c0d7dedaa1b24a0a67960fe5e3f5dfa1072543cb0b9: Status 404 returned error can't find the container with id b5a5df0468822d57dfe95c0d7dedaa1b24a0a67960fe5e3f5dfa1072543cb0b9 Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.954241 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twgmg\" (UniqueName: \"kubernetes.io/projected/e5a59f7f-625d-436b-98ab-a46920a34fe8-kube-api-access-twgmg\") pod \"service-ca-operator-777779d784-wnqxg\" (UID: \"e5a59f7f-625d-436b-98ab-a46920a34fe8\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.966019 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.974619 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.977131 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxz89\" (UniqueName: \"kubernetes.io/projected/3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5-kube-api-access-zxz89\") pod \"ingress-canary-vbs4j\" (UID: \"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5\") " pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.993405 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" Oct 03 12:52:36 crc kubenswrapper[4868]: I1003 12:52:36.994596 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9krfm\" (UniqueName: \"kubernetes.io/projected/2677264c-dfc3-41b9-af63-bcaab6205cdd-kube-api-access-9krfm\") pod \"cluster-image-registry-operator-dc59b4c8b-lx48z\" (UID: \"2677264c-dfc3-41b9-af63-bcaab6205cdd\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.002299 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.006238 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ch7r\" (UniqueName: \"kubernetes.io/projected/9daf470c-0dc6-4f9b-b5ce-1f0438d40396-kube-api-access-8ch7r\") pod \"dns-default-775r2\" (UID: \"9daf470c-0dc6-4f9b-b5ce-1f0438d40396\") " pod="openshift-dns/dns-default-775r2" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.011913 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.022155 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.033701 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.034102 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.534090416 +0000 UTC m=+153.743939482 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.074330 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.078860 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" Oct 03 12:52:37 crc kubenswrapper[4868]: W1003 12:52:37.093810 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4f0c0c7c_c0dd_4fdd_bc3d_906a7b512fe8.slice/crio-a298355b240f17d293769cd02c74ee6c6de535ee6ea48920b8a2045e153109a4 WatchSource:0}: Error finding container a298355b240f17d293769cd02c74ee6c6de535ee6ea48920b8a2045e153109a4: Status 404 returned error can't find the container with id a298355b240f17d293769cd02c74ee6c6de535ee6ea48920b8a2045e153109a4 Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.095870 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.105485 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-775r2" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.137653 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.138087 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.638042472 +0000 UTC m=+153.847891538 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.144626 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-vbs4j" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.190270 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-pkh4z"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.233559 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.241137 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.241544 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.741531345 +0000 UTC m=+153.951380411 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: W1003 12:52:37.275729 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4538295a_b047_4bba_999b_9d35082ad0c5.slice/crio-2e9275af8cf3eccfc8cfe5c3651dc7a0160eda2fc465ed874e1ffb08946a468f WatchSource:0}: Error finding container 2e9275af8cf3eccfc8cfe5c3651dc7a0160eda2fc465ed874e1ffb08946a468f: Status 404 returned error can't find the container with id 2e9275af8cf3eccfc8cfe5c3651dc7a0160eda2fc465ed874e1ffb08946a468f Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.283627 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.331424 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" event={"ID":"b8395931-a0eb-4b94-a5e8-f58c3aea5992","Type":"ContainerStarted","Data":"f1b64db14dfb66dc86b5a2a4f5b37c9fd8ca47bafead0799e255c0e497b7c0df"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.349297 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" event={"ID":"8fb971e6-b551-4410-aea5-5a8286a03737","Type":"ContainerStarted","Data":"2b3a3c4cd3554ac2ef6f2fc31cca19bc150b0e35c48a65149a7365882218b31d"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.349374 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.349376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" event={"ID":"8fb971e6-b551-4410-aea5-5a8286a03737","Type":"ContainerStarted","Data":"5c8e3c8d4b28cfd4304506f041d7d8432161f8515f15d068d3082086124200f2"} Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.350151 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.850112595 +0000 UTC m=+154.059961781 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.381712 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" event={"ID":"b0ce17a1-24a9-4a8a-93ef-adf14c62816f","Type":"ContainerStarted","Data":"cc8071e3fb03d27be4a2e51aabf0af1b7ab8df5f2e3b796718ff9c7a0f012610"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.381769 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" event={"ID":"b0ce17a1-24a9-4a8a-93ef-adf14c62816f","Type":"ContainerStarted","Data":"609fba698fe275271a62c0ad0843888c2094fd4c10b2a78ae83a33c88db2565c"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.381785 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" event={"ID":"b0ce17a1-24a9-4a8a-93ef-adf14c62816f","Type":"ContainerStarted","Data":"ddeabea31f5e81b66233a9cad861d9b65d3a39c490a8b53f383657e0c5f4de57"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.388201 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.396035 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.403795 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.407873 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.450604 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.452172 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:37.952160829 +0000 UTC m=+154.162009895 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.465326 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" event={"ID":"587fdffe-2a05-4308-b88b-48837d1f256d","Type":"ContainerStarted","Data":"730638abc3cb3855b2bfc8377dff92c7533aa17ceedeaf0ecca44888d9325d56"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.465385 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.465398 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" event={"ID":"587fdffe-2a05-4308-b88b-48837d1f256d","Type":"ContainerStarted","Data":"48895f43d6c47f24aeee7064e9f044e9d3c560211dfecbcba769fe668cca6981"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.472548 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z5zzt" event={"ID":"38ba9fb1-abc5-45a3-b878-46d32e466672","Type":"ContainerStarted","Data":"dc4337bb1cc4a3257d55db85a9a5da507e75ecbaa8d9145137f6336ffbe01253"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.472596 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-z5zzt" event={"ID":"38ba9fb1-abc5-45a3-b878-46d32e466672","Type":"ContainerStarted","Data":"6fe8fe266e44cd4794b406e21aef63ccccf3cc2b061073b8079e4399b281765d"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.473039 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-z5zzt" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.487022 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.492848 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wwvt7" event={"ID":"2c0fb16a-559e-47e9-98f3-54563b3bddf4","Type":"ContainerStarted","Data":"7e1454a699cf848754edd1323d00d558f2047f3f0289b093c113ad53d7cae6aa"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.502879 4868 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-x5wxv container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" start-of-body= Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.502961 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" podUID="587fdffe-2a05-4308-b88b-48837d1f256d" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.21:5443/healthz\": dial tcp 10.217.0.21:5443: connect: connection refused" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.527212 4868 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.527296 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z5zzt" podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.529648 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-czpdv"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.551616 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.552923 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.553771 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.05372752 +0000 UTC m=+154.263576596 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.556431 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.557820 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" event={"ID":"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd","Type":"ContainerStarted","Data":"eaced265c534a8db81422a6909c9ebfc8c2142ecff364b5a442731a32883d3be"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.557884 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" event={"ID":"1ad1cf81-62ab-48c3-af04-6dd5b63b46dd","Type":"ContainerStarted","Data":"a9c9995fed67a75937f9d2b81f5811a60684618f9182510487afc80db824f6f3"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.558185 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.559696 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.059500926 +0000 UTC m=+154.269349992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.562674 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.571808 4868 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-4zh86 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" start-of-body= Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.571911 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" podUID="1ad1cf81-62ab-48c3-af04-6dd5b63b46dd" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.30:8443/healthz\": dial tcp 10.217.0.30:8443: connect: connection refused" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.574249 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-955rh" event={"ID":"d31eb7f7-fae9-47b2-a16e-68b08da804bb","Type":"ContainerStarted","Data":"75d7ba56eb40b728e66a7c84b628453345fa0cf9fb0214e4f4faf430a32c11b5"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.574298 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-955rh" event={"ID":"d31eb7f7-fae9-47b2-a16e-68b08da804bb","Type":"ContainerStarted","Data":"aa4202ccddf820c00bc7f8035de73b12957ef3fbb53a0ad7b91fe16c7ac80099"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.593254 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" event={"ID":"5afabba8-1052-41e6-af8f-83da47c0d71a","Type":"ContainerStarted","Data":"1e7cabe1bb50a5c832e0ff3eeb767f82139ad48903919213841b7933df998795"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.593294 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" event={"ID":"5afabba8-1052-41e6-af8f-83da47c0d71a","Type":"ContainerStarted","Data":"19d2c0d06b554b303b85c7282de5f208131a11842668ee90897906d739a335d4"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.635593 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" event={"ID":"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf","Type":"ContainerStarted","Data":"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108"} Oct 03 12:52:37 crc 
kubenswrapper[4868]: I1003 12:52:37.636660 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.641941 4868 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-6fp9h container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" start-of-body= Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.642001 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": dial tcp 10.217.0.8:8443: connect: connection refused" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.642455 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" event={"ID":"4538295a-b047-4bba-999b-9d35082ad0c5","Type":"ContainerStarted","Data":"2e9275af8cf3eccfc8cfe5c3651dc7a0160eda2fc465ed874e1ffb08946a468f"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.654648 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" event={"ID":"24f70e5b-2400-4370-9c85-e41ad7f06e05","Type":"ContainerStarted","Data":"12b5aa860d54c5edf21e13932b71b429dc11746e33194d8af4235cd17bfd610c"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.654711 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" event={"ID":"24f70e5b-2400-4370-9c85-e41ad7f06e05","Type":"ContainerStarted","Data":"2895a2d3820bece351057e718fc2f6b6a3363ca23a765d11ec29f78b6bf81415"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.657200 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.661570 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.16154979 +0000 UTC m=+154.371398856 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.666649 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cxlqt" event={"ID":"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8","Type":"ContainerStarted","Data":"a298355b240f17d293769cd02c74ee6c6de535ee6ea48920b8a2045e153109a4"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.687577 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" event={"ID":"25d8d7db-4938-4621-880a-033692278381","Type":"ContainerStarted","Data":"784a7ec1dea44c246adb76ba6a115c654e06fb9fd0d52c2c9a21de50ca06b3e1"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.695661 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" event={"ID":"0c5660b3-e155-4397-89c5-50fd4bb54dba","Type":"ContainerStarted","Data":"b5a5df0468822d57dfe95c0d7dedaa1b24a0a67960fe5e3f5dfa1072543cb0b9"} Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.729038 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.758106 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"] Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.759096 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.759361 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.25934817 +0000 UTC m=+154.469197236 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.862303 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.862486 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.362442123 +0000 UTC m=+154.572291189 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.862564 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.863096 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.36308156 +0000 UTC m=+154.572930666 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.888674 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv" podStartSLOduration=131.888655416 podStartE2EDuration="2m11.888655416s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:37.857671186 +0000 UTC m=+154.067520262" watchObservedRunningTime="2025-10-03 12:52:37.888655416 +0000 UTC m=+154.098504482" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.934834 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" podStartSLOduration=132.934808313 podStartE2EDuration="2m12.934808313s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:37.929840719 +0000 UTC m=+154.139689805" watchObservedRunningTime="2025-10-03 12:52:37.934808313 +0000 UTC m=+154.144657379" Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.967037 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.969582 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.469552983 +0000 UTC m=+154.679402049 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:37 crc kubenswrapper[4868]: I1003 12:52:37.969706 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:37 crc kubenswrapper[4868]: E1003 12:52:37.970183 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.47017623 +0000 UTC m=+154.680025296 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.018670 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-955rh" podStartSLOduration=132.01865301 podStartE2EDuration="2m12.01865301s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.015970848 +0000 UTC m=+154.225819914" watchObservedRunningTime="2025-10-03 12:52:38.01865301 +0000 UTC m=+154.228502086" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.071042 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.071398 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.571382892 +0000 UTC m=+154.781231958 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.148303 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-dpnf7" podStartSLOduration=133.148280553 podStartE2EDuration="2m13.148280553s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.144971664 +0000 UTC m=+154.354820730" watchObservedRunningTime="2025-10-03 12:52:38.148280553 +0000 UTC m=+154.358129619" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.173657 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.174250 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.674232809 +0000 UTC m=+154.884081875 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.231853 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86" podStartSLOduration=132.231825062 podStartE2EDuration="2m12.231825062s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.2112621 +0000 UTC m=+154.421111176" watchObservedRunningTime="2025-10-03 12:52:38.231825062 +0000 UTC m=+154.441674138" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.274337 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.274774 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.774757262 +0000 UTC m=+154.984606328 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.301930 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-5txl8" podStartSLOduration=133.30190745 podStartE2EDuration="2m13.30190745s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.292740654 +0000 UTC m=+154.502589720" watchObservedRunningTime="2025-10-03 12:52:38.30190745 +0000 UTC m=+154.511756516" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.331583 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-x6p6m" podStartSLOduration=132.331557904 podStartE2EDuration="2m12.331557904s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.325977404 +0000 UTC m=+154.535826500" watchObservedRunningTime="2025-10-03 12:52:38.331557904 +0000 UTC m=+154.541406970" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.387913 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.389198 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.889183158 +0000 UTC m=+155.099032224 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.414889 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-ktkcc"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.419197 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-k8ftf" podStartSLOduration=132.419175352 podStartE2EDuration="2m12.419175352s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.416230862 +0000 UTC m=+154.626079928" watchObservedRunningTime="2025-10-03 12:52:38.419175352 +0000 UTC m=+154.629024428" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.497655 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.498135 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:38.998116717 +0000 UTC m=+155.207965783 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.599758 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.600121 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.10010516 +0000 UTC m=+155.309954226 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.675699 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-vbs4j"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.705681 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.706073 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.206032998 +0000 UTC m=+155.415882064 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.775345 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" event={"ID":"89de9048-c2c6-4caf-9ed2-17eee2161f84","Type":"ContainerStarted","Data":"233df20e420a0acfbbf104c27f2e39454ab17ba0712135d93ebdedd942da9ad1"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.807831 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.808260 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.308248457 +0000 UTC m=+155.518097523 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.818428 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.820889 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" event={"ID":"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a","Type":"ContainerStarted","Data":"e49efb6c8250748865f3ec02051baa43a7c08c7b57e1162d0cf19a5efe3849ad"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.820930 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" event={"ID":"3e67dcd0-021f-4c7e-aaa5-351df73f9f2a","Type":"ContainerStarted","Data":"2f6b8b0a232ac6dbe5f92f04fcc84c4ea468c193dafa8449f7dd7c3aefee04c5"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.844635 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" event={"ID":"45f565d0-329e-4a36-9138-91fa8da747fa","Type":"ContainerStarted","Data":"8727fa33353ebafb176cfe2ded2017a5bacf9dd252088fa9742df599483a4b40"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.857948 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" event={"ID":"45f565d0-329e-4a36-9138-91fa8da747fa","Type":"ContainerStarted","Data":"595bc797dac8123a61c614a38d4f75c35de7310510761e719a7732e82d65b332"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.860321 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.879693 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-cxlqt" event={"ID":"4f0c0c7c-c0dd-4fdd-bc3d-906a7b512fe8","Type":"ContainerStarted","Data":"62e4abb3d51d7a59ab36b1df957eb5f561a4da7ecbc980530e3758980f734221"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.897844 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.907232 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-wwvt7" event={"ID":"2c0fb16a-559e-47e9-98f3-54563b3bddf4","Type":"ContainerStarted","Data":"c01877013f9ed1e1ce7f2cc70e5c2968cbef30e1e7d5e5d42b2bd4f3bf4178fe"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.910518 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:38 crc kubenswrapper[4868]: 
E1003 12:52:38.910704 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.410683553 +0000 UTC m=+155.620532619 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.911283 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:38 crc kubenswrapper[4868]: E1003 12:52:38.915901 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.415876331 +0000 UTC m=+155.625725407 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.924539 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.958558 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-hpssf"] Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.967164 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" event={"ID":"346d964e-9d9d-4175-9828-ba55c3c31778","Type":"ContainerStarted","Data":"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.967203 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" event={"ID":"346d964e-9d9d-4175-9828-ba55c3c31778","Type":"ContainerStarted","Data":"1e0a17adcd5a8103f8531b5fa6785d1805c9b7661e253b3ff28c5e217a9da554"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.968519 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.969228 4868 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qjpnr container/marketplace-operator 
namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body= Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.969271 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.969981 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-z5zzt" podStartSLOduration=133.969956881 podStartE2EDuration="2m13.969956881s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:38.881428609 +0000 UTC m=+155.091277685" watchObservedRunningTime="2025-10-03 12:52:38.969956881 +0000 UTC m=+155.179805957" Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.990470 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" event={"ID":"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe","Type":"ContainerStarted","Data":"55b08eddb6712679a6c8b8cd1c010a74350fd46eed0ba47c8f0fe30c865d782f"} Oct 03 12:52:38 crc kubenswrapper[4868]: I1003 12:52:38.990534 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" event={"ID":"24fe616c-6ca3-4bc7-ae8d-4fca938d21fe","Type":"ContainerStarted","Data":"eef18ab77c1e68a0d4722ad1b509b4d26baacfed55edc7689e2e8cfd042c4f2e"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.006822 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-czpdv" event={"ID":"5e70f3d2-5b07-494a-8edc-dcc8e541752e","Type":"ContainerStarted","Data":"feaec386a695646d04277f44e9693bf4fc571e372288cf436c3055d147d9077e"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.006880 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-czpdv" event={"ID":"5e70f3d2-5b07-494a-8edc-dcc8e541752e","Type":"ContainerStarted","Data":"685c8b16f88ab3972cf9e572a5673089e6de3b3a446e94ed0c389dac2bd27efc"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.007468 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-czpdv" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.010173 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.012609 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.015874 4868 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.515849881 +0000 UTC m=+155.725698967 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.018982 4868 patch_prober.go:28] interesting pod/console-operator-58897d9998-czpdv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body= Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.019039 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-czpdv" podUID="5e70f3d2-5b07-494a-8edc-dcc8e541752e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.024242 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" event={"ID":"9a3325c2-fabf-4db0-8d36-d6b38bc6a399","Type":"ContainerStarted","Data":"80569684c0b45c21d6c283e856cb9c1bd798a2bf6673808a819f618412a7ec16"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.049567 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" event={"ID":"e33637b6-3fe6-44bd-8d23-28d3cc691dcf","Type":"ContainerStarted","Data":"718526b98ac696b07b536e5724344a952c47c6bd9443b0db2e408527bae57dfd"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.053902 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.056202 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.058098 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.067673 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.069642 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" event={"ID":"df67ce79-06d5-4d3f-a54e-b77cad420085","Type":"ContainerStarted","Data":"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.069682 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" 
event={"ID":"df67ce79-06d5-4d3f-a54e-b77cad420085","Type":"ContainerStarted","Data":"36f693b4d06422ad00ef7dad6ea15079ea5364267da01f927d8aa86dda55ecbd"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.070428 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.071250 4868 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-rql9x container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" start-of-body= Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.071288 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.34:8443/healthz\": dial tcp 10.217.0.34:8443: connect: connection refused" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.076836 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-dq7lc"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.085618 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-775r2"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.095130 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-qzzd4"] Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.114339 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-wwvt7" podStartSLOduration=133.114319099 podStartE2EDuration="2m13.114319099s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.049608425 +0000 UTC m=+155.259457511" watchObservedRunningTime="2025-10-03 12:52:39.114319099 +0000 UTC m=+155.324168165" Oct 03 12:52:39 crc kubenswrapper[4868]: W1003 12:52:39.117571 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod000a8020_feb7_4a25_95a7_a13fdfa39109.slice/crio-188d4adf8ec610798c32b000e67e4f48cb0ba14739d282dd682df57bbd04a7fb WatchSource:0}: Error finding container 188d4adf8ec610798c32b000e67e4f48cb0ba14739d282dd682df57bbd04a7fb: Status 404 returned error can't find the container with id 188d4adf8ec610798c32b000e67e4f48cb0ba14739d282dd682df57bbd04a7fb Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.118500 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.120144 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-10-03 12:52:39.620129705 +0000 UTC m=+155.829978771 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.124283 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-czpdv" podStartSLOduration=134.124266646 podStartE2EDuration="2m14.124266646s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.081176011 +0000 UTC m=+155.291025087" watchObservedRunningTime="2025-10-03 12:52:39.124266646 +0000 UTC m=+155.334115712" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.128211 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-cxlqt" podStartSLOduration=5.128195521 podStartE2EDuration="5.128195521s" podCreationTimestamp="2025-10-03 12:52:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.122596661 +0000 UTC m=+155.332445727" watchObservedRunningTime="2025-10-03 12:52:39.128195521 +0000 UTC m=+155.338044607" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.133572 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" event={"ID":"5be75ac0-b221-4c59-b9c0-67d4b77f7d86","Type":"ContainerStarted","Data":"ac8db360141b2c80e4c2e4fcae9e611554602ddc7ea317f5fbbb2b119f538ce7"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.139137 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.149174 4868 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-cdsl9 container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" start-of-body= Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.149225 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.31:6443/healthz\": dial tcp 10.217.0.31:6443: connect: connection refused" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.150176 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-8ftk2" podStartSLOduration=133.15014438 podStartE2EDuration="2m13.15014438s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.149610145 +0000 UTC m=+155.359459211" 
watchObservedRunningTime="2025-10-03 12:52:39.15014438 +0000 UTC m=+155.359993446" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.215927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" event={"ID":"b8395931-a0eb-4b94-a5e8-f58c3aea5992","Type":"ContainerStarted","Data":"edb9b8c934ed1338ed01ae7fb2bbd5fdd5b8075b46125705e53a8338b1d55f8b"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.216299 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" podStartSLOduration=133.216283502 podStartE2EDuration="2m13.216283502s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.177746578 +0000 UTC m=+155.387595644" watchObservedRunningTime="2025-10-03 12:52:39.216283502 +0000 UTC m=+155.426132568" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.224552 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.227398 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.727356908 +0000 UTC m=+155.937206014 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.265181 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" event={"ID":"5ac2a283-f340-4724-8304-86142053130b","Type":"ContainerStarted","Data":"bb839de4a4959710da58b0eb3f6c156ccefffc56e3149736b743f48a5a43ddb3"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.267309 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-wt988" podStartSLOduration=135.267296128 podStartE2EDuration="2m15.267296128s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.216860987 +0000 UTC m=+155.426710053" watchObservedRunningTime="2025-10-03 12:52:39.267296128 +0000 UTC m=+155.477145194" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.300246 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-bgndt" podStartSLOduration=133.300225441 podStartE2EDuration="2m13.300225441s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.269947459 +0000 UTC m=+155.479796525" watchObservedRunningTime="2025-10-03 12:52:39.300225441 +0000 UTC m=+155.510074527" Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.331421 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" event={"ID":"0c5660b3-e155-4397-89c5-50fd4bb54dba","Type":"ContainerStarted","Data":"69b911eeaadf8b7ad0c2d72dcf547f749864bce876adf641f73d92bcfef0fd76"} Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.332555 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.352409 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.852369678 +0000 UTC m=+156.062218744 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.357532 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" podStartSLOduration=133.357510126 podStartE2EDuration="2m13.357510126s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.355521262 +0000 UTC m=+155.565370328" watchObservedRunningTime="2025-10-03 12:52:39.357510126 +0000 UTC m=+155.567359202"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.359024 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-szgx9" podStartSLOduration=135.359017076 podStartE2EDuration="2m15.359017076s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.299381148 +0000 UTC m=+155.509230214" watchObservedRunningTime="2025-10-03 12:52:39.359017076 +0000 UTC m=+155.568866152"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.396670 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" event={"ID":"25d8d7db-4938-4621-880a-033692278381","Type":"ContainerStarted","Data":"8415a1774295668288e77081abb313f0d1ba4d370d744de24a3b856522032f36"}
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.396678 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" podStartSLOduration=135.396661965 podStartE2EDuration="2m15.396661965s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.39644298 +0000 UTC m=+155.606292076" watchObservedRunningTime="2025-10-03 12:52:39.396661965 +0000 UTC m=+155.606511031"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.434223 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.435265 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:39.935250119 +0000 UTC m=+156.145099185 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.456459 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" event={"ID":"997a6d06-ce81-4866-9055-04d6ff8c635f","Type":"ContainerStarted","Data":"d68bf59d27a238133ddeb3026fea1410abad4b7c9aec94466c9837b5b47f1b6b"}
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.484755 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" event={"ID":"4538295a-b047-4bba-999b-9d35082ad0c5","Type":"ContainerStarted","Data":"43b97a1de3336b817243fdb09fa188057eef810496378add046f09167d505c34"}
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.484801 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" event={"ID":"4538295a-b047-4bba-999b-9d35082ad0c5","Type":"ContainerStarted","Data":"7e84e40594e10e44cd11e40b85278717c11a58dfb725db671df21ae8e4d802bd"}
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.493956 4868 patch_prober.go:28] interesting pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body=
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.497269 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z5zzt" podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.501567 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4zh86"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.510220 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" podStartSLOduration=133.510200908 podStartE2EDuration="2m13.510200908s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.502438119 +0000 UTC m=+155.712287185" watchObservedRunningTime="2025-10-03 12:52:39.510200908 +0000 UTC m=+155.720049974"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.511119 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"]
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.512343 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.518346 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-x5wxv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.519359 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.535719 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.536046 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.536292 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxpbb\" (UniqueName: \"kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.536335 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.536582 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.546288 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-tr6fl" podStartSLOduration=134.546268264 podStartE2EDuration="2m14.546268264s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.542917064 +0000 UTC m=+155.752766130" watchObservedRunningTime="2025-10-03 12:52:39.546268264 +0000 UTC m=+155.756117330"
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.548506 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.048483753 +0000 UTC m=+156.258332899 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.621581 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-pkh4z" podStartSLOduration=133.621561951 podStartE2EDuration="2m13.621561951s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.61853925 +0000 UTC m=+155.828388316" watchObservedRunningTime="2025-10-03 12:52:39.621561951 +0000 UTC m=+155.831411017"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.643587 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.643821 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.644044 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.144027723 +0000 UTC m=+156.353876789 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.644459 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.643861 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxpbb\" (UniqueName: \"kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.646137 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.646251 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.647749 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.147735973 +0000 UTC m=+156.357585049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.650507 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.658066 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"]
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.672369 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" podStartSLOduration=133.672343723 podStartE2EDuration="2m13.672343723s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:39.652562862 +0000 UTC m=+155.862411928" watchObservedRunningTime="2025-10-03 12:52:39.672343723 +0000 UTC m=+155.882192789"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.721812 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9gtqf"]
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.722704 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.737982 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxpbb\" (UniqueName: \"kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb\") pod \"certified-operators-q8vcv\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") " pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.740790 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9gtqf"]
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.740933 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.756043 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.756233 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.756270 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.756350 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jz75\" (UniqueName: \"kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.756468 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.256453357 +0000 UTC m=+156.466302433 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.756504 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-wwvt7"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.774286 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 12:52:39 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld
Oct 03 12:52:39 crc kubenswrapper[4868]: [+]process-running ok
Oct 03 12:52:39 crc kubenswrapper[4868]: healthz check failed
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.774350 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.841374 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.859767 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jz75\" (UniqueName: \"kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.859827 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.859863 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.859889 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.860575 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.860993 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.360978907 +0000 UTC m=+156.570827973 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.861255 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.954011 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jz75\" (UniqueName: \"kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75\") pod \"community-operators-9gtqf\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") " pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.967251 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:39 crc kubenswrapper[4868]: E1003 12:52:39.967977 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.467954884 +0000 UTC m=+156.677803950 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.983160 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wch8d"]
Oct 03 12:52:39 crc kubenswrapper[4868]: I1003 12:52:39.993880 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.057365 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wch8d"]
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.084868 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.084940 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.084968 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.085080 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5kjj\" (UniqueName: \"kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.085441 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.585426352 +0000 UTC m=+156.795275418 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.111046 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-qw72n"]
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.112876 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.167612 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qw72n"]
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.194761 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195005 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195076 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195125 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.195242 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.695209544 +0000 UTC m=+156.905058620 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195378 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pt2lf\" (UniqueName: \"kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195436 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5kjj\" (UniqueName: \"kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195481 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.195835 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.196005 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.255979 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5kjj\" (UniqueName: \"kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj\") pod \"certified-operators-wch8d\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.305937 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.305979 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.306067 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pt2lf\" (UniqueName: \"kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.306101 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.306595 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.806579268 +0000 UTC m=+157.016428334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.307451 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.307614 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.362341 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.366547 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pt2lf\" (UniqueName: \"kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf\") pod \"community-operators-qw72n\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.416627 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.417181 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:40.917137351 +0000 UTC m=+157.126986417 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.518913 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.519561 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.019537905 +0000 UTC m=+157.229386971 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.546175 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" event={"ID":"25d8d7db-4938-4621-880a-033692278381","Type":"ContainerStarted","Data":"8ab814660237ba92400a06a46e24834eb4f3141b0ef8eb96d4bf6b2fd10fac21"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.605548 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-c5q8n" podStartSLOduration=136.605522868 podStartE2EDuration="2m16.605522868s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.590169147 +0000 UTC m=+156.800018213" watchObservedRunningTime="2025-10-03 12:52:40.605522868 +0000 UTC m=+156.815371944"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.620217 4868 generic.go:334] "Generic (PLEG): container finished" podID="5ac2a283-f340-4724-8304-86142053130b" containerID="b384ca8618ba6dd480f34fb71e652f0648f30af5cc2e68edb42b5f4d0064bf3c" exitCode=0
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.620857 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.621118 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.121097345 +0000 UTC m=+157.330946411 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.621284 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.621668 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.121656531 +0000 UTC m=+157.331505597 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.673040 4868 generic.go:334] "Generic (PLEG): container finished" podID="9a3325c2-fabf-4db0-8d36-d6b38bc6a399" containerID="269e45a5ba90a8e965399b042005bc2e99ee06cda6120b0aafb076d907d6aab9" exitCode=0
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.683166 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-dq7lc" podStartSLOduration=135.683143319 podStartE2EDuration="2m15.683143319s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.675098063 +0000 UTC m=+156.884947129" watchObservedRunningTime="2025-10-03 12:52:40.683143319 +0000 UTC m=+156.892992385"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.722637 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.724162 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.224137767 +0000 UTC m=+157.433986893 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.757503 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 12:52:40 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld
Oct 03 12:52:40 crc kubenswrapper[4868]: [+]process-running ok
Oct 03 12:52:40 crc kubenswrapper[4868]: healthz check failed
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.757558 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.777322 4868 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-w7bhp container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused" start-of-body=
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.777371 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" podUID="b1cb9fb8-f769-4afa-86f4-6b5e240e92b3" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.27:8443/healthz\": dial tcp 10.217.0.27:8443: connect: connection refused"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.782610 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" podStartSLOduration=134.782587073 podStartE2EDuration="2m14.782587073s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.727865646 +0000 UTC m=+156.937714712" watchObservedRunningTime="2025-10-03 12:52:40.782587073 +0000 UTC m=+156.992436139"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.828572 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.829856 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.329842119 +0000 UTC m=+157.539691185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.846806 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" podStartSLOduration=134.846789443 podStartE2EDuration="2m14.846789443s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.83137797 +0000 UTC m=+157.041227036" watchObservedRunningTime="2025-10-03 12:52:40.846789443 +0000 UTC m=+157.056638509"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871840 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dq7lc" event={"ID":"a4cfd01a-748d-42ec-9d69-bdf306168942","Type":"ContainerStarted","Data":"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871874 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dq7lc" event={"ID":"a4cfd01a-748d-42ec-9d69-bdf306168942","Type":"ContainerStarted","Data":"f609dc3aee397a55645af6e9ebbad797b3acb487897a1a5bc4bc2c4d27fccff2"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871902 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871925 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" event={"ID":"e5a59f7f-625d-436b-98ab-a46920a34fe8","Type":"ContainerStarted","Data":"edb69cdad3ff87f6fb88958c335d0e6adf9c388127152c25f5b97b21b6961984"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871942 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" event={"ID":"5ac2a283-f340-4724-8304-86142053130b","Type":"ContainerDied","Data":"b384ca8618ba6dd480f34fb71e652f0648f30af5cc2e68edb42b5f4d0064bf3c"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871953 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" event={"ID":"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66","Type":"ContainerStarted","Data":"40df6eb103af8eeb6add409354ca3be5e73823aab493e9b29dfe1a821ac0c53d"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" event={"ID":"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66","Type":"ContainerStarted","Data":"648ef6c14cb2e23401a045fdae28306488510e738a599311d38cbd1913e0dc5c"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871971 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" event={"ID":"2677264c-dfc3-41b9-af63-bcaab6205cdd","Type":"ContainerStarted","Data":"e91c86563dc3d8d4555ed8a149e3cdcf674d641af1265d39717b1ed6910cb451"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871980 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" event={"ID":"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3","Type":"ContainerStarted","Data":"93baee7adc8e9db80dcd0657442ef12c49310b2a3b0810303c9b4b963dd2c958"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871989 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" event={"ID":"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3","Type":"ContainerStarted","Data":"60de94eb3c464e27ac963f7807367cd7f9b6c3a7ac41e8239016c09d53d61060"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.871998 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" event={"ID":"9a3325c2-fabf-4db0-8d36-d6b38bc6a399","Type":"ContainerDied","Data":"269e45a5ba90a8e965399b042005bc2e99ee06cda6120b0aafb076d907d6aab9"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872010 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" event={"ID":"5be75ac0-b221-4c59-b9c0-67d4b77f7d86","Type":"ContainerStarted","Data":"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872019 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" event={"ID":"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3","Type":"ContainerStarted","Data":"c473f7c1f49a0cf277b260293ea72144a039f656cbdfcad783770781c4bded6c"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872028 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp" event={"ID":"b1cb9fb8-f769-4afa-86f4-6b5e240e92b3","Type":"ContainerStarted","Data":"5ec1b3f0f2ec256e45aea9b5d07e375ae57f0634a14fbd2d5a2b497855e9c9d9"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872036 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" event={"ID":"000a8020-feb7-4a25-95a7-a13fdfa39109","Type":"ContainerStarted","Data":"188d4adf8ec610798c32b000e67e4f48cb0ba14739d282dd682df57bbd04a7fb"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872045 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" event={"ID":"b0eaa220-caf8-40a2-9587-fd4a6bd4f535","Type":"ContainerStarted","Data":"5937c1994ede81e8dd74e71836f1329a7dc740ea2c44d74caa6396e1dd5fd02f"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872075 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" event={"ID":"b0eaa220-caf8-40a2-9587-fd4a6bd4f535","Type":"ContainerStarted","Data":"fc073beb9a6f06a4d5e3f3a9265aca3a6c36a438d6f0ae7174b01921168f69de"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872086 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" event={"ID":"8586186d-7b44-4899-8eae-82c717ea38f5","Type":"ContainerStarted","Data":"5ed40749bae5b671a7168700a259fa81bff4580540aeb22468d8e358918fd4bc"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872095 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" event={"ID":"ba635e01-f221-477f-b2d0-ada4a6473f58","Type":"ContainerStarted","Data":"0acc66058cdfa69dea1563d8fe52ac35e8f382ede6b21c26538626091ed57629"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.872104 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" event={"ID":"ba635e01-f221-477f-b2d0-ada4a6473f58","Type":"ContainerStarted","Data":"25805a4bca8820a1a22f4f4c152ee15478f2d1750ee1d48d0f0788d29bb7d3d6"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.890584 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p2w2n" podStartSLOduration=134.890561027 podStartE2EDuration="2m14.890561027s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.88736733 +0000 UTC m=+157.097216396" watchObservedRunningTime="2025-10-03 12:52:40.890561027 +0000 UTC m=+157.100410103"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.905225 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vbs4j" event={"ID":"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5","Type":"ContainerStarted","Data":"4722966112b7152b5a7af9af1a9c1244283209c6d69b0db694013c268b874fcd"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.905261 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-vbs4j" event={"ID":"3aa758e6-7b2d-4dc3-b1d2-d74a58e5ccc5","Type":"ContainerStarted","Data":"e656de7c4e4918aa8a87868254ce4c2e67bfad37bcd22fe4ecf3b15a4e50526f"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.928980 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" event={"ID":"1c441e29-000c-4055-b932-6d70f2f4d82e","Type":"ContainerStarted","Data":"018c649595178604493c3749b9fe91defeccd1ee704e6545507a27c03518b34b"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.929038 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" event={"ID":"1c441e29-000c-4055-b932-6d70f2f4d82e","Type":"ContainerStarted","Data":"a70896dde6b2258a61f013654c117384102dd56f994b938bbf5f43cde7a7943d"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.932422 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:40 crc kubenswrapper[4868]: E1003 12:52:40.933269 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.43324991 +0000 UTC m=+157.643098976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.944336 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-vtsbm" event={"ID":"997a6d06-ce81-4866-9055-04d6ff8c635f","Type":"ContainerStarted","Data":"9ee2d036115f22f7d07f1b98616a1f40ee3bb6e64dd34570845d1d7eabfa5d7a"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.945565 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw72n"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.954771 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wch8d"
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.970805 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" event={"ID":"3ec322eb-b57f-4ba6-a635-b023501af402","Type":"ContainerStarted","Data":"b58b67a00c43ff67c5ad34abd9a0e2b72bf24167f6280b48be86e0f25360bf08"}
Oct 03 12:52:40 crc kubenswrapper[4868]: I1003 12:52:40.984486 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-5j9fw" event={"ID":"e33637b6-3fe6-44bd-8d23-28d3cc691dcf","Type":"ContainerStarted","Data":"e0aade29e5b5ed5446bcb7e65e2d93fc842af22ff3f30167ae4ba3ee3b226445"}
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.035584 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.038353 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-775r2" event={"ID":"9daf470c-0dc6-4f9b-b5ce-1f0438d40396","Type":"ContainerStarted","Data":"cee3a1f4dc39e04f1e6ba5b94a6ae200fa7a2e27fd9eb91ac9949470c6ca7559"}
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.045264 4868 patch_prober.go:28] interesting pod/console-operator-58897d9998-czpdv container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused" start-of-body=
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.045373 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-czpdv" podUID="5e70f3d2-5b07-494a-8edc-dcc8e541752e" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.17:8443/readyz\": dial tcp 10.217.0.17:8443: connect: connection refused"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.071185 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.071362 4868 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qjpnr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body=
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.075570 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.079208 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.579189721 +0000 UTC m=+157.789038787 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.119456 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-vbs4j" podStartSLOduration=8.119436689 podStartE2EDuration="8.119436689s" podCreationTimestamp="2025-10-03 12:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:40.954424087 +0000 UTC m=+157.164273153" watchObservedRunningTime="2025-10-03 12:52:41.119436689 +0000 UTC m=+157.329285765"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.163744 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.165252 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.665231677 +0000 UTC m=+157.875080743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.261974 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.266674 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.267216 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.767201119 +0000 UTC m=+157.977050185 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.367915 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.368697 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.868673648 +0000 UTC m=+158.078522714 (durationBeforeRetry 500ms).
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.465100 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9gtqf"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.471566 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.471929 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:41.971918015 +0000 UTC m=+158.181767081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.516592 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.526078 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.537419 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.571984 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.572483 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.572648 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.072626484 +0000 UTC m=+158.282475550 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.572800 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.573124 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.073112816 +0000 UTC m=+158.282961882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.588508 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.672331 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-qw72n"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.673578 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.673754 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtqlp\" (UniqueName: \"kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.673784 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.673864 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.173834775 +0000 UTC m=+158.383683841 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.674027 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.761377 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 12:52:41 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld
Oct 03 12:52:41 crc kubenswrapper[4868]: [+]process-running ok
Oct 03 12:52:41 crc kubenswrapper[4868]: healthz check failed
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.761425 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.778799 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtqlp\" (UniqueName: \"kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.778842 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.778873 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.778900 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.779375 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.779822 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.780070 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.280042741 +0000 UTC m=+158.489891807 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.857210 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtqlp\" (UniqueName: \"kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp\") pod \"redhat-marketplace-7d8ll\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") " pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.873473 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.880211 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.880513 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.380498814 +0000 UTC m=+158.590347880 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.940524 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.941831 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.976130 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"]
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.990994 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.991260 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtxhx\" (UniqueName: \"kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.991475 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:41 crc kubenswrapper[4868]: I1003 12:52:41.991628 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:41 crc kubenswrapper[4868]: E1003 12:52:41.992044 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.492032442 +0000 UTC m=+158.701881508 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.039823 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wch8d"]
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.097405 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.097910 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.097937 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtxhx\" (UniqueName: \"kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.098030 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.098807 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.098892 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.598876595 +0000 UTC m=+158.808725661 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.099119 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.156193 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtxhx\" (UniqueName: \"kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx\") pod \"redhat-marketplace-729j7\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.160122 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" event={"ID":"1c441e29-000c-4055-b932-6d70f2f4d82e","Type":"ContainerStarted","Data":"de10cae756c9dca838ab67a334e29966fb3ae57f9e737d595108c2a9705adddd"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.203142 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.204421 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.704408793 +0000 UTC m=+158.914257859 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.230792 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.230828 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" event={"ID":"f55f0e23-45aa-4f38-8fe9-125ddaa6ae66","Type":"ContainerStarted","Data":"510685e900ed48c5488896a3305722e8420304f4276a5c477addf664621e6343"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.278185 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" event={"ID":"9a3325c2-fabf-4db0-8d36-d6b38bc6a399","Type":"ContainerStarted","Data":"b62eb137f234b631930600bec774edd48b3540d40a752c244e3946252b9ace5e"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.279088 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.283007 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-6j5bh" podStartSLOduration=136.282993799 podStartE2EDuration="2m16.282993799s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.231224402 +0000 UTC m=+158.441073478" watchObservedRunningTime="2025-10-03 12:52:42.282993799 +0000 UTC m=+158.492842875"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.287677 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerStarted","Data":"4bbdc50fecbb16e9a138f25444f1cd6605ebe5783ade2a9c46c23952c7a3349b"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.307005 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.307163 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.807140816 +0000 UTC m=+159.016989882 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.307371 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.307664 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.80765567 +0000 UTC m=+159.017504736 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.309649 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerStarted","Data":"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.309720 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerStarted","Data":"ab11c3f591500723771acfca7c746e149f43663bdadccb724c87d98c66d60576"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.317884 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" podStartSLOduration=137.317870393 podStartE2EDuration="2m17.317870393s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.317606977 +0000 UTC m=+158.527456043" watchObservedRunningTime="2025-10-03 12:52:42.317870393 +0000 UTC m=+158.527719469"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.320685 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" podStartSLOduration=136.320678089 podStartE2EDuration="2m16.320678089s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.280543713 +0000 UTC m=+158.490392809" watchObservedRunningTime="2025-10-03 12:52:42.320678089 +0000 UTC m=+158.530527155"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.342865 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-775r2" event={"ID":"9daf470c-0dc6-4f9b-b5ce-1f0438d40396","Type":"ContainerStarted","Data":"3c7725a06d1a677ba0112192257db162d33e7577f034c42efc078dc68dcd0b5c"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.344609 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" event={"ID":"8586186d-7b44-4899-8eae-82c717ea38f5","Type":"ContainerStarted","Data":"d8ebea24a375db6fd1aa46eaa01babef7c8117e77bf691af5c748644bfba26cd"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.347640 4868 generic.go:334] "Generic (PLEG): container finished" podID="000a8020-feb7-4a25-95a7-a13fdfa39109" containerID="6c74398b781ac1c46246c8ecb1d712034c79785ffa34e1df9a69c1d0f2c3a988" exitCode=0
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.347714 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" event={"ID":"000a8020-feb7-4a25-95a7-a13fdfa39109","Type":"ContainerDied","Data":"6c74398b781ac1c46246c8ecb1d712034c79785ffa34e1df9a69c1d0f2c3a988"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.351341 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-729j7"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.357765 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" event={"ID":"3ec322eb-b57f-4ba6-a635-b023501af402","Type":"ContainerStarted","Data":"e90dc4279714b03ffc11fd3c9713019a7a3727d92e42e94301b794292549199e"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.362950 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerStarted","Data":"254c7bb820fc15ba1f6b562245fd48258ab1b73cd67fe6171894408b5b533718"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.373043 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" event={"ID":"5ac2a283-f340-4724-8304-86142053130b","Type":"ContainerStarted","Data":"6e8c90e77ff29ff933e78ac4dd2f36cecda087029e1cd117db4c919b65be5f5d"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.377552 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" event={"ID":"ba635e01-f221-477f-b2d0-ada4a6473f58","Type":"ContainerStarted","Data":"ee5f32fe7b49912b54875971d79e86c326fdba7b07a5e57d16e1e123f0ba79af"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.379670 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" event={"ID":"89de9048-c2c6-4caf-9ed2-17eee2161f84","Type":"ContainerStarted","Data":"58f3cc28287c390ae5bd9b860305a9c894c274ebf9d63243ae2a0f3fe7d60497"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.384446 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" event={"ID":"2677264c-dfc3-41b9-af63-bcaab6205cdd","Type":"ContainerStarted","Data":"868fe5518c9f42a14dd36bccbff7c8da20b1d364ad80cc1e183debf73bfca8bc"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.408586 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.409297 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:42.909266612 +0000 UTC m=+159.119115688 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.418262 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" event={"ID":"e5a59f7f-625d-436b-98ab-a46920a34fe8","Type":"ContainerStarted","Data":"f2b938fde3426a573b4618c5ad970df194f5a4b5f68afb55857e2c58d16dc964"}
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.426036 4868 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-qjpnr container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused" start-of-body=
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.426098 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.41:8080/healthz\": dial tcp 10.217.0.41:8080: connect: connection refused"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.432369 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-w7bhp"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.439116 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-lx48z" podStartSLOduration=137.439095962 podStartE2EDuration="2m17.439095962s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.438612569 +0000 UTC m=+158.648461645" watchObservedRunningTime="2025-10-03 12:52:42.439095962 +0000 UTC m=+158.648945028"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.516317 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.531924 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" podStartSLOduration=136.531905079 podStartE2EDuration="2m16.531905079s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.484835107 +0000 UTC m=+158.694684173" watchObservedRunningTime="2025-10-03 12:52:42.531905079 +0000 UTC m=+158.741754155"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.539446 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.03943024 +0000 UTC m=+159.249279306 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.568931 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-dzlbk" podStartSLOduration=136.5689127 podStartE2EDuration="2m16.5689127s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.535844645 +0000 UTC m=+158.745693711" watchObservedRunningTime="2025-10-03 12:52:42.5689127 +0000 UTC m=+158.778761776"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.610468 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-wnqxg" podStartSLOduration=136.610440733 podStartE2EDuration="2m16.610440733s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:42.60920658 +0000 UTC m=+158.819055646" watchObservedRunningTime="2025-10-03 12:52:42.610440733 +0000 UTC m=+158.820289799"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.618418 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.618722 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.118706435 +0000 UTC m=+159.328555501 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.698425 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"]
Oct 03 12:52:42 crc kubenswrapper[4868]: W1003 12:52:42.719161 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc739901a_792b_4b7b_958d_5fcba129ff22.slice/crio-dab8721334ec9901c83db76f3dcbc5a3d534819ab172c2f6fb793df9dac8ebe5 WatchSource:0}: Error finding container dab8721334ec9901c83db76f3dcbc5a3d534819ab172c2f6fb793df9dac8ebe5: Status 404 returned error can't find the container with id dab8721334ec9901c83db76f3dcbc5a3d534819ab172c2f6fb793df9dac8ebe5
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.719875 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.720179 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.220166943 +0000 UTC m=+159.430016009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.769820 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Oct 03 12:52:42 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld
Oct 03 12:52:42 crc kubenswrapper[4868]: [+]process-running ok
Oct 03 12:52:42 crc kubenswrapper[4868]: healthz check failed
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.769900 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.821138 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.821668 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.321650513 +0000 UTC m=+159.531499579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.911241 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-czpdv"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.922317 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:42 crc kubenswrapper[4868]: E1003 12:52:42.922679 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.42266663 +0000 UTC m=+159.632515696 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.927433 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"]
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.939128 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.954633 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Oct 03 12:52:42 crc kubenswrapper[4868]: I1003 12:52:42.998811 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"]
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.020907 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"]
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.025739 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.026149 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.026264 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d89dh\" (UniqueName: \"kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.026302 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.026434 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.52641241 +0000 UTC m=+159.736261486 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.132091 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.132632 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.632596296 +0000 UTC m=+159.842445372 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.132875 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d89dh\" (UniqueName: \"kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.132917 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.133385 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.133716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.133817 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.168107 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d89dh\" (UniqueName: \"kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh\") pod \"redhat-operators-fcf5t\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") " pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.236073 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.236242 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.736215032 +0000 UTC m=+159.946064098 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.236333 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.236815 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.736807818 +0000 UTC m=+159.946656884 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.316581 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-clvll"]
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.317861 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-clvll"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.337013 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.337444 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.837428094 +0000 UTC m=+160.047277160 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.350927 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-clvll"]
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.410622 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.438372 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.438429 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.438577 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5"
Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.438659 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vp66n\" (UniqueName: \"kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll"
Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.438973 4868 nestedpendingoperations.go:348] Operation for
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:43.938958945 +0000 UTC m=+160.148808011 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.440286 4868 generic.go:334] "Generic (PLEG): container finished" podID="b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" containerID="93baee7adc8e9db80dcd0657442ef12c49310b2a3b0810303c9b4b963dd2c958" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.440372 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" event={"ID":"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3","Type":"ContainerDied","Data":"93baee7adc8e9db80dcd0657442ef12c49310b2a3b0810303c9b4b963dd2c958"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.459895 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerStarted","Data":"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.459939 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerStarted","Data":"82370337aa45a78f59937c3502e83b74c63ae61df5549425d5842a7e7186ced4"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.466678 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.470684 4868 generic.go:334] "Generic (PLEG): container finished" podID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerID="79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.470752 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerDied","Data":"79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.491159 4868 generic.go:334] "Generic (PLEG): container finished" podID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerID="7505de666c7700d063890d3ef278f14389978e29ccdead4441b6a3d315590499" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.491256 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerDied","Data":"7505de666c7700d063890d3ef278f14389978e29ccdead4441b6a3d315590499"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.491286 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" 
event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerStarted","Data":"6497de826bbb1905a99fa12fed9bf073c61829ab4085fd41a15f0ded7bc0f8cc"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.517633 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" event={"ID":"3ec322eb-b57f-4ba6-a635-b023501af402","Type":"ContainerStarted","Data":"2db25983e36bcf2cd9b8b03ffba93687eea10c3a6e902a2619530349eb3e7b6b"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.522680 4868 generic.go:334] "Generic (PLEG): container finished" podID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerID="e85a766f1ae65e5a11727a86b202493e6cbf01ffc208561472c220e9163c0400" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.523765 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerDied","Data":"e85a766f1ae65e5a11727a86b202493e6cbf01ffc208561472c220e9163c0400"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.527837 4868 generic.go:334] "Generic (PLEG): container finished" podID="c739901a-792b-4b7b-958d-5fcba129ff22" containerID="6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.527902 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerDied","Data":"6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.527930 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerStarted","Data":"dab8721334ec9901c83db76f3dcbc5a3d534819ab172c2f6fb793df9dac8ebe5"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.531665 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-775r2" event={"ID":"9daf470c-0dc6-4f9b-b5ce-1f0438d40396","Type":"ContainerStarted","Data":"eeb1be80813e1a54c5149bbf5881f4737f46b8a441960a1abcbc80df19d13298"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.532100 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-775r2" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.540172 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.540406 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.540461 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content\") pod \"redhat-operators-clvll\" (UID: 
\"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.540581 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vp66n\" (UniqueName: \"kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.543113 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.543472 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.545834 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" event={"ID":"8586186d-7b44-4899-8eae-82c717ea38f5","Type":"ContainerStarted","Data":"50396142b42e472293206d5244770778b192a7f3a4102a7656e4b434cff30b7d"} Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.546792 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.040960849 +0000 UTC m=+160.250809905 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.558738 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" event={"ID":"000a8020-feb7-4a25-95a7-a13fdfa39109","Type":"ContainerStarted","Data":"dc38421c06ca2dfa8601715d4f1ec39c027dff96d82ba2bcef9bb8d330b53b54"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.581753 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vp66n\" (UniqueName: \"kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n\") pod \"redhat-operators-clvll\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.583741 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-hpssf" podStartSLOduration=137.583728694 podStartE2EDuration="2m17.583728694s" podCreationTimestamp="2025-10-03 12:50:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:43.582574613 +0000 UTC m=+159.792423689" watchObservedRunningTime="2025-10-03 12:52:43.583728694 +0000 UTC m=+159.793577760" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.601458 4868 generic.go:334] "Generic (PLEG): container finished" podID="f74dd718-f443-4005-a447-f2384a2f218d" containerID="c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428" exitCode=0 Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.602857 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerDied","Data":"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"} Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.641684 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.643888 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.143868076 +0000 UTC m=+160.353717212 (durationBeforeRetry 500ms). 
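
The "Observed pod startup duration" entries report two numbers: podStartE2EDuration is observed-running time minus pod creation time, and podStartSLOduration additionally discounts image-pull time, so the two coincide here because the pull timestamps are zero. A minimal sketch of that arithmetic (inferred from these fields, not kubelet's tracker code):

package main

import (
	"fmt"
	"time"
)

// startupDuration measures creation-to-running, subtracting pull time
// when both pull timestamps are set (they are zero in this log).
func startupDuration(created, observedRunning, startedPulling, finishedPulling time.Time) time.Duration {
	d := observedRunning.Sub(created)
	if !startedPulling.IsZero() && !finishedPulling.IsZero() {
		d -= finishedPulling.Sub(startedPulling)
	}
	return d
}

func main() {
	created := time.Date(2025, 10, 3, 12, 50, 26, 0, time.UTC)
	running := created.Add(2*time.Minute + 17*time.Second)
	fmt.Println(startupDuration(created, running, time.Time{}, time.Time{})) // 2m17s
}
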
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.647818 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-775r2" podStartSLOduration=10.647800622 podStartE2EDuration="10.647800622s" podCreationTimestamp="2025-10-03 12:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:43.611367765 +0000 UTC m=+159.821216861" watchObservedRunningTime="2025-10-03 12:52:43.647800622 +0000 UTC m=+159.857649688" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.676722 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-gsc8t" podStartSLOduration=138.676699946 podStartE2EDuration="2m18.676699946s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:43.665852295 +0000 UTC m=+159.875701361" watchObservedRunningTime="2025-10-03 12:52:43.676699946 +0000 UTC m=+159.886549012" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.723263 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.743185 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.743376 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.243346872 +0000 UTC m=+160.453195938 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.743919 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.770208 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:43 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:43 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:43 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.770279 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.770847 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.270814648 +0000 UTC m=+160.480663714 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.820476 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"] Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.845265 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.845448 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.345423487 +0000 UTC m=+160.555272553 (durationBeforeRetry 500ms). 
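
The router probe output quoted above is the aggregated healthz format: one [+]/[-] line per check, a trailing "healthz check failed", and a 500 status as soon as any check fails, which is exactly what the startup probe reports. A minimal sketch of such a handler (illustrative, not the router's actual implementation):

package main

import (
	"fmt"
	"log"
	"net/http"
)

type check struct {
	name string
	ok   bool
}

// healthz renders each check as [+]name ok or [-]name failed and turns
// any failure into an HTTP 500, the statuscode the prober logs.
func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		healthy := true
		body := ""
		for _, c := range checks {
			if c.ok {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			} else {
				healthy = false
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			}
		}
		if !healthy {
			w.WriteHeader(http.StatusInternalServerError)
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.Handle("/healthz", healthz([]check{
		{"backend-http", false}, {"has-synced", false}, {"process-running", true},
	}))
	log.Fatal(http.ListenAndServe(":8080", nil))
}
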
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.845567 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.846168 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.346157427 +0000 UTC m=+160.556006503 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:43 crc kubenswrapper[4868]: I1003 12:52:43.946566 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:43 crc kubenswrapper[4868]: E1003 12:52:43.947407 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.44738787 +0000 UTC m=+160.657236936 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.052182 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.052692 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.552672371 +0000 UTC m=+160.762521427 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.067592 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-clvll"] Oct 03 12:52:44 crc kubenswrapper[4868]: W1003 12:52:44.115843 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8bc4ae7_ab51_41fd_b455_acbf54499fde.slice/crio-68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0 WatchSource:0}: Error finding container 68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0: Status 404 returned error can't find the container with id 68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0 Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.153002 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.153410 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.653375159 +0000 UTC m=+160.863224225 (durationBeforeRetry 500ms). 
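
Each failed attempt above arms a per-volume deadline ("No retries permitted until ..."), and re-queued work that arrives before that deadline is rejected without running; the wait shown in this log stays at 500ms, though a real backoff may grow. A minimal sketch of that gating pattern (illustrative types, not kubelet's nestedpendingoperations):

package main

import (
	"fmt"
	"time"
)

// retryGate refuses to re-run a keyed operation until its retry-after
// deadline has passed; a failure (re)arms the deadline.
type retryGate struct {
	notBefore map[string]time.Time
	delay     time.Duration
}

func (g *retryGate) tryRun(key string, op func() error) error {
	if until, ok := g.notBefore[key]; ok && time.Now().Before(until) {
		return fmt.Errorf("no retries permitted until %s (durationBeforeRetry %s)",
			until.Format(time.RFC3339), g.delay)
	}
	if err := op(); err != nil {
		g.notBefore[key] = time.Now().Add(g.delay)
		return err
	}
	delete(g.notBefore, key)
	return nil
}

func main() {
	g := &retryGate{notBefore: map[string]time.Time{}, delay: 500 * time.Millisecond}
	fail := func() error { return fmt.Errorf("driver not registered") }
	fmt.Println(g.tryRun("pvc-657094db", fail)) // fails, arms the gate
	fmt.Println(g.tryRun("pvc-657094db", fail)) // rejected until the deadline passes
}
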
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.255949 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.256850 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.756832562 +0000 UTC m=+160.966681628 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.357423 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.358080 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.858041874 +0000 UTC m=+161.067890940 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.423594 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.424274 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.426584 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.427435 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.436777 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.459017 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.459582 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:44.959566775 +0000 UTC m=+161.169415841 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.560119 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.560431 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.560583 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.560796 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-10-03 12:52:45.060776196 +0000 UTC m=+161.270625262 (durationBeforeRetry 500ms). 
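
The alternating UnmountVolume and MountVolume entries are a reconciler at work: volumes desired by scheduled pods but not yet mounted get a MountVolume, and mounted volumes whose pod is gone get an UnmountVolume, each retried until it succeeds. A minimal sketch of that compare step, assuming plain sets instead of kubelet's desired/actual state-of-world types:

package main

import "fmt"

// reconcile drives actual toward desired: mount what is missing,
// unmount what is no longer wanted.
func reconcile(desired, actual map[string]bool) {
	for v := range desired {
		if !actual[v] {
			fmt.Printf("operationExecutor.MountVolume started for volume %q\n", v)
		}
	}
	for v := range actual {
		if !desired[v] {
			fmt.Printf("operationExecutor.UnmountVolume started for volume %q\n", v)
		}
	}
}

func main() {
	desired := map[string]bool{"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8": true, "kubelet-dir": true}
	actual := map[string]bool{"kubelet-dir": true, "secret-volume": true}
	reconcile(desired, actual) // mounts the PVC, unmounts the orphaned secret
}
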
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.618699 4868 generic.go:334] "Generic (PLEG): container finished" podID="9725f594-2bef-441e-9407-8712be581aa9" containerID="229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb" exitCode=0 Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.618927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerDied","Data":"229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.619145 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerStarted","Data":"ba119737a55735a13642fe000ac063c0d2c94dd42a9f8bbae98b4615fb2bb730"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.630869 4868 generic.go:334] "Generic (PLEG): container finished" podID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerID="1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55" exitCode=0 Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.630965 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerDied","Data":"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.638985 4868 generic.go:334] "Generic (PLEG): container finished" podID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerID="917b82c8a6c04a95201b483fc33783897e2c907ca94621f6f1c7edf333708b37" exitCode=0 Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.639040 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerDied","Data":"917b82c8a6c04a95201b483fc33783897e2c907ca94621f6f1c7edf333708b37"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.639081 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerStarted","Data":"68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.650154 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" event={"ID":"89de9048-c2c6-4caf-9ed2-17eee2161f84","Type":"ContainerStarted","Data":"3d2aa3f912ea927ddcaabc378040f4c5b4a0d0de9c2102796a94a00b3d602659"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.655498 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" event={"ID":"000a8020-feb7-4a25-95a7-a13fdfa39109","Type":"ContainerStarted","Data":"9c4b2c6f8056a993caf1d6a011b98c8e6901e85701a958a5ff51001aa6ced67c"} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.661901 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.662010 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.662040 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.665227 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: E1003 12:52:44.665651 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-10-03 12:52:45.165590515 +0000 UTC m=+161.375439581 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-4cxr5" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.669592 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qjlm6" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.704348 4868 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.704955 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.724717 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" podStartSLOduration=140.724701659 podStartE2EDuration="2m20.724701659s" podCreationTimestamp="2025-10-03 12:50:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:44.704687593 +0000 UTC m=+160.914536659" watchObservedRunningTime="2025-10-03 12:52:44.724701659 +0000 UTC m=+160.934550725" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.738342 4868 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-10-03T12:52:44.704373675Z","Handler":null,"Name":""} Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.745184 4868 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.745227 4868 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.751401 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:44 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:44 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:44 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.751453 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 
12:52:44.767951 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.771449 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.778175 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.875773 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.891079 4868 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Oct 03 12:52:44 crc kubenswrapper[4868]: I1003 12:52:44.891122 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.020541 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.046196 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-4cxr5\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.083895 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume\") pod \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.084038 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgq2k\" (UniqueName: \"kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k\") pod \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.084110 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume\") pod \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\" (UID: \"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3\") " Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.085383 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume" (OuterVolumeSpecName: "config-volume") pod "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" (UID: "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.096008 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k" (OuterVolumeSpecName: "kube-api-access-zgq2k") pod "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" (UID: "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3"). InnerVolumeSpecName "kube-api-access-zgq2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.108166 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" (UID: "b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3"). InnerVolumeSpecName "secret-volume". 
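
This is the turning point of the whole sequence: the plugin watcher notices the driver's registration socket under /var/lib/kubelet/plugins_registry, the driver's reported name, endpoint, and versions are validated, and only then is it added to the registered-drivers table that the earlier lookups were failing against. A minimal sketch of that flow (socket path and driver name are from the log; everything else is illustrative):

package main

import (
	"fmt"
	"strings"
)

type registry struct{ drivers map[string]string }

// register validates the plugin's self-reported identity before adding
// it to the table that clientFor-style lookups consult.
func (r *registry) register(name, endpoint string, versions []string) error {
	if name == "" || endpoint == "" || len(versions) == 0 {
		return fmt.Errorf("invalid plugin registration")
	}
	r.drivers[name] = endpoint
	return nil
}

// onSocket reacts to a new *-reg.sock appearing in plugins_registry. A
// real watcher would dial the socket and ask the plugin for its
// name/endpoint/versions; hard-coded here to mirror the log.
func (r *registry) onSocket(path string) {
	if !strings.HasSuffix(path, "-reg.sock") {
		return
	}
	r.register("kubevirt.io.hostpath-provisioner",
		"/var/lib/kubelet/plugins/csi-hostpath/csi.sock", []string{"1.0.0"})
}

func main() {
	r := &registry{drivers: map[string]string{}}
	r.onSocket("/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock")
	fmt.Println(r.drivers) // subsequent lookups for this driver now succeed
}

Immediately after registration the log shows attacher.MountDevice being skipped because the driver does not advertise the STAGE_UNSTAGE_VOLUME capability, and MountVolume.SetUp for pvc-657094db then succeeds, unblocking image-registry-697d97f7c8-4cxr5.
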
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.185915 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.186359 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgq2k\" (UniqueName: \"kubernetes.io/projected/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-kube-api-access-zgq2k\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.186370 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.203577 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.309188 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.573832 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.665328 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9c3417c9-6ea6-45ed-b593-3a8cea5001a6","Type":"ContainerStarted","Data":"6d0db774fd4ab1a26492f8c1417989d1e711a207ccd9771591b878a371e9f2e8"} Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.669861 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.669924 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b" event={"ID":"b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3","Type":"ContainerDied","Data":"60de94eb3c464e27ac963f7807367cd7f9b6c3a7ac41e8239016c09d53d61060"} Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.669961 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60de94eb3c464e27ac963f7807367cd7f9b6c3a7ac41e8239016c09d53d61060" Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.682325 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" event={"ID":"89de9048-c2c6-4caf-9ed2-17eee2161f84","Type":"ContainerStarted","Data":"6fd1a5ea35553457dc0634374045688558b35d9ab358aec4f2e2ebbbe52f4b96"} Oct 03 12:52:45 crc kubenswrapper[4868]: W1003 12:52:45.718642 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfe20bc6b_533e_46ee_bff3_e8a7a8a5ddd1.slice/crio-e035f32efd03e9dfeb8d3cec00c38f4b3bb32e9a79ba515521d6bbbd26c95e4c WatchSource:0}: Error finding container e035f32efd03e9dfeb8d3cec00c38f4b3bb32e9a79ba515521d6bbbd26c95e4c: Status 404 returned error can't find the container with id e035f32efd03e9dfeb8d3cec00c38f4b3bb32e9a79ba515521d6bbbd26c95e4c Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.755362 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:45 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:45 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:45 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:45 crc kubenswrapper[4868]: I1003 12:52:45.755421 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.196203 4868 patch_prober.go:28] interesting pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.196547 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z5zzt" podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.196632 4868 patch_prober.go:28] interesting pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.196686 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z5zzt" 
podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.568563 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.632630 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.632668 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.641671 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.699952 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" event={"ID":"89de9048-c2c6-4caf-9ed2-17eee2161f84","Type":"ContainerStarted","Data":"4dcb9aaabfb39ab2419df3627c15d4a210b64274bb179719c93477e8967d7381"} Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.705669 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9c3417c9-6ea6-45ed-b593-3a8cea5001a6","Type":"ContainerStarted","Data":"cff4c4123002e7993a9ddd3e709c5343910f8773d718c55e24df942fb252b6a3"} Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.711513 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" event={"ID":"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1","Type":"ContainerStarted","Data":"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278"} Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.711557 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.711571 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" event={"ID":"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1","Type":"ContainerStarted","Data":"e035f32efd03e9dfeb8d3cec00c38f4b3bb32e9a79ba515521d6bbbd26c95e4c"} Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.719110 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-4sjww" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.747891 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.753104 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-ktkcc" podStartSLOduration=13.753082144 podStartE2EDuration="13.753082144s" podCreationTimestamp="2025-10-03 12:52:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:46.725386861 +0000 UTC m=+162.935235927" watchObservedRunningTime="2025-10-03 12:52:46.753082144 +0000 UTC m=+162.962931230" Oct 03 12:52:46 crc kubenswrapper[4868]: 
I1003 12:52:46.753838 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" podStartSLOduration=141.753828723 podStartE2EDuration="2m21.753828723s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:46.747617467 +0000 UTC m=+162.957466543" watchObservedRunningTime="2025-10-03 12:52:46.753828723 +0000 UTC m=+162.963677789" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.754069 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:46 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:46 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:46 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.754113 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.766931 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.821252 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.821230679 podStartE2EDuration="2.821230679s" podCreationTimestamp="2025-10-03 12:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:46.818349012 +0000 UTC m=+163.028198078" watchObservedRunningTime="2025-10-03 12:52:46.821230679 +0000 UTC m=+163.031079745" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.880902 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.882379 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.898704 4868 patch_prober.go:28] interesting pod/apiserver-76f77b778f-qzzd4 container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]log ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]etcd ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/start-apiserver-admission-initializer ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/generic-apiserver-start-informers ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/max-in-flight-filter ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/storage-object-count-tracker-hook ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/image.openshift.io-apiserver-caches ok Oct 03 12:52:46 crc kubenswrapper[4868]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Oct 03 
12:52:46 crc kubenswrapper[4868]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/project.openshift.io-projectcache ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/openshift.io-startinformers ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/openshift.io-restmapperupdater ok Oct 03 12:52:46 crc kubenswrapper[4868]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Oct 03 12:52:46 crc kubenswrapper[4868]: livez check failed Oct 03 12:52:46 crc kubenswrapper[4868]: I1003 12:52:46.898778 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" podUID="000a8020-feb7-4a25-95a7-a13fdfa39109" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.097119 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.097220 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.101894 4868 patch_prober.go:28] interesting pod/console-f9d7485db-dq7lc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.38:8443/health\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.101946 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dq7lc" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" probeResult="failure" output="Get \"https://10.217.0.38:8443/health\": dial tcp 10.217.0.38:8443: connect: connection refused" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.730600 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"9c3417c9-6ea6-45ed-b593-3a8cea5001a6","Type":"ContainerDied","Data":"cff4c4123002e7993a9ddd3e709c5343910f8773d718c55e24df942fb252b6a3"} Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.730740 4868 generic.go:334] "Generic (PLEG): container finished" podID="9c3417c9-6ea6-45ed-b593-3a8cea5001a6" containerID="cff4c4123002e7993a9ddd3e709c5343910f8773d718c55e24df942fb252b6a3" exitCode=0 Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.748871 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:47 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:47 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:47 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.748948 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.755497 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.764750 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/4fc2d690-5dcc-4f98-8607-0b3909f44c23-metrics-certs\") pod \"network-metrics-daemon-nwqvb\" (UID: \"4fc2d690-5dcc-4f98-8607-0b3909f44c23\") " pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.784427 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-nwqvb" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.935702 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 12:52:47 crc kubenswrapper[4868]: E1003 12:52:47.935950 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" containerName="collect-profiles" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.935962 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" containerName="collect-profiles" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.936130 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" containerName="collect-profiles" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.936587 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.941541 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.946642 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Oct 03 12:52:47 crc kubenswrapper[4868]: I1003 12:52:47.953145 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.059394 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.059440 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.116933 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-nwqvb"] Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.160235 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.161107 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.161181 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.202585 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.270628 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.618307 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.749291 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" event={"ID":"4fc2d690-5dcc-4f98-8607-0b3909f44c23","Type":"ContainerStarted","Data":"73fcca34a8497e0eb164b4d8171b80cd2111513abed81e0c26793d938c7a6a6c"} Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.749735 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:48 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:48 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:48 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.749779 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:48 crc kubenswrapper[4868]: I1003 12:52:48.756515 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"11294c4c-3b04-497e-acdb-1e58d9af49d2","Type":"ContainerStarted","Data":"5bcb5324c25c8f2fc1a13cd9601a38fb21be6408bdd972ecdc1b3fb70fa30696"} Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.153143 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.188594 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access\") pod \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.209951 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "9c3417c9-6ea6-45ed-b593-3a8cea5001a6" (UID: "9c3417c9-6ea6-45ed-b593-3a8cea5001a6"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.290025 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir\") pod \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\" (UID: \"9c3417c9-6ea6-45ed-b593-3a8cea5001a6\") " Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.290133 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "9c3417c9-6ea6-45ed-b593-3a8cea5001a6" (UID: "9c3417c9-6ea6-45ed-b593-3a8cea5001a6"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.290582 4868 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.290608 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/9c3417c9-6ea6-45ed-b593-3a8cea5001a6-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.750423 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:49 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:49 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:49 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.750547 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.774097 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" event={"ID":"4fc2d690-5dcc-4f98-8607-0b3909f44c23","Type":"ContainerStarted","Data":"e5eb9d9e287f170042b983cf8bc6905909630d4e4a9b104ce7058ad5ddb2437b"} Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.779213 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"9c3417c9-6ea6-45ed-b593-3a8cea5001a6","Type":"ContainerDied","Data":"6d0db774fd4ab1a26492f8c1417989d1e711a207ccd9771591b878a371e9f2e8"} Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.779241 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d0db774fd4ab1a26492f8c1417989d1e711a207ccd9771591b878a371e9f2e8" Oct 03 12:52:49 crc kubenswrapper[4868]: I1003 12:52:49.779522 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Oct 03 12:52:50 crc kubenswrapper[4868]: I1003 12:52:50.749892 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:50 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:50 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:50 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:50 crc kubenswrapper[4868]: I1003 12:52:50.749954 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:50 crc kubenswrapper[4868]: I1003 12:52:50.786982 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"11294c4c-3b04-497e-acdb-1e58d9af49d2","Type":"ContainerStarted","Data":"3161f8713474003cb41807597e707ab0fbb5b90d6d2f7a8fd0f78fe292524c00"} Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.293156 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.329959 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.32993766 podStartE2EDuration="4.32993766s" podCreationTimestamp="2025-10-03 12:52:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:50.804591971 +0000 UTC m=+167.014441037" watchObservedRunningTime="2025-10-03 12:52:51.32993766 +0000 UTC m=+167.539786726" Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.748645 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:51 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:51 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:51 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.748708 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.797422 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-nwqvb" 
event={"ID":"4fc2d690-5dcc-4f98-8607-0b3909f44c23","Type":"ContainerStarted","Data":"22e807b49ea03bce402b3e916a54edc9f5136c85cdaa85ed49b2ef5efaaa8071"} Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.801400 4868 generic.go:334] "Generic (PLEG): container finished" podID="11294c4c-3b04-497e-acdb-1e58d9af49d2" containerID="3161f8713474003cb41807597e707ab0fbb5b90d6d2f7a8fd0f78fe292524c00" exitCode=0 Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.801455 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"11294c4c-3b04-497e-acdb-1e58d9af49d2","Type":"ContainerDied","Data":"3161f8713474003cb41807597e707ab0fbb5b90d6d2f7a8fd0f78fe292524c00"} Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.826611 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-nwqvb" podStartSLOduration=146.826585927 podStartE2EDuration="2m26.826585927s" podCreationTimestamp="2025-10-03 12:50:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:52:51.818243074 +0000 UTC m=+168.028092160" watchObservedRunningTime="2025-10-03 12:52:51.826585927 +0000 UTC m=+168.036435003" Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.887213 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:51 crc kubenswrapper[4868]: I1003 12:52:51.892936 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-qzzd4" Oct 03 12:52:52 crc kubenswrapper[4868]: I1003 12:52:52.113747 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-775r2" Oct 03 12:52:52 crc kubenswrapper[4868]: I1003 12:52:52.749215 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:52 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:52 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:52 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:52 crc kubenswrapper[4868]: I1003 12:52:52.749299 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.095550 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.163653 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir\") pod \"11294c4c-3b04-497e-acdb-1e58d9af49d2\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.163765 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access\") pod \"11294c4c-3b04-497e-acdb-1e58d9af49d2\" (UID: \"11294c4c-3b04-497e-acdb-1e58d9af49d2\") " Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.163847 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "11294c4c-3b04-497e-acdb-1e58d9af49d2" (UID: "11294c4c-3b04-497e-acdb-1e58d9af49d2"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.164004 4868 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/11294c4c-3b04-497e-acdb-1e58d9af49d2-kubelet-dir\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.171394 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "11294c4c-3b04-497e-acdb-1e58d9af49d2" (UID: "11294c4c-3b04-497e-acdb-1e58d9af49d2"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.266623 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/11294c4c-3b04-497e-acdb-1e58d9af49d2-kube-api-access\") on node \"crc\" DevicePath \"\"" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.749440 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:53 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:53 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:53 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.749526 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.824264 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"11294c4c-3b04-497e-acdb-1e58d9af49d2","Type":"ContainerDied","Data":"5bcb5324c25c8f2fc1a13cd9601a38fb21be6408bdd972ecdc1b3fb70fa30696"} Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.824371 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bcb5324c25c8f2fc1a13cd9601a38fb21be6408bdd972ecdc1b3fb70fa30696" Oct 03 12:52:53 crc kubenswrapper[4868]: I1003 12:52:53.824507 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Oct 03 12:52:54 crc kubenswrapper[4868]: I1003 12:52:54.748808 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:54 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:54 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:54 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:54 crc kubenswrapper[4868]: I1003 12:52:54.749172 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:55 crc kubenswrapper[4868]: I1003 12:52:55.749557 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:55 crc kubenswrapper[4868]: [-]has-synced failed: reason withheld Oct 03 12:52:55 crc kubenswrapper[4868]: [+]process-running ok Oct 03 12:52:55 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:55 crc kubenswrapper[4868]: I1003 12:52:55.749628 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.196022 4868 patch_prober.go:28] interesting pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.196084 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-z5zzt" podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.196123 4868 patch_prober.go:28] interesting pod/downloads-7954f5f757-z5zzt container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" start-of-body= Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.196173 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-z5zzt" podUID="38ba9fb1-abc5-45a3-b878-46d32e466672" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.33:8080/\": dial tcp 10.217.0.33:8080: connect: connection refused" Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.750496 4868 patch_prober.go:28] interesting pod/router-default-5444994796-wwvt7 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Oct 03 12:52:56 crc kubenswrapper[4868]: [+]has-synced ok Oct 03 12:52:56 crc kubenswrapper[4868]: 
[+]process-running ok Oct 03 12:52:56 crc kubenswrapper[4868]: healthz check failed Oct 03 12:52:56 crc kubenswrapper[4868]: I1003 12:52:56.751206 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-wwvt7" podUID="2c0fb16a-559e-47e9-98f3-54563b3bddf4" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Oct 03 12:52:57 crc kubenswrapper[4868]: I1003 12:52:57.097755 4868 patch_prober.go:28] interesting pod/console-f9d7485db-dq7lc container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.38:8443/health\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Oct 03 12:52:57 crc kubenswrapper[4868]: I1003 12:52:57.098421 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-dq7lc" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" probeResult="failure" output="Get \"https://10.217.0.38:8443/health\": dial tcp 10.217.0.38:8443: connect: connection refused" Oct 03 12:52:57 crc kubenswrapper[4868]: I1003 12:52:57.751954 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:52:57 crc kubenswrapper[4868]: I1003 12:52:57.755935 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-wwvt7" Oct 03 12:53:02 crc kubenswrapper[4868]: I1003 12:53:02.146900 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:53:02 crc kubenswrapper[4868]: I1003 12:53:02.147008 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:53:05 crc kubenswrapper[4868]: I1003 12:53:05.209742 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:53:06 crc kubenswrapper[4868]: I1003 12:53:06.209785 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-z5zzt" Oct 03 12:53:07 crc kubenswrapper[4868]: I1003 12:53:07.101637 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:53:07 crc kubenswrapper[4868]: I1003 12:53:07.105880 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 12:53:12 crc kubenswrapper[4868]: I1003 12:53:12.788458 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Oct 03 12:53:17 crc kubenswrapper[4868]: I1003 12:53:17.020268 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6c5lk" Oct 03 12:53:28 crc kubenswrapper[4868]: E1003 12:53:28.427590 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = 
copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 12:53:28 crc kubenswrapper[4868]: E1003 12:53:28.429768 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vp66n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-clvll_openshift-marketplace(c8bc4ae7-ab51-41fd-b455-acbf54499fde): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:28 crc kubenswrapper[4868]: E1003 12:53:28.431038 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-clvll" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" Oct 03 12:53:31 crc kubenswrapper[4868]: E1003 12:53:31.839544 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-clvll" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" Oct 03 12:53:31 crc kubenswrapper[4868]: E1003 12:53:31.912679 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Oct 03 12:53:31 crc kubenswrapper[4868]: E1003 12:53:31.912859 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d89dh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-fcf5t_openshift-marketplace(9725f594-2bef-441e-9407-8712be581aa9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:31 crc kubenswrapper[4868]: E1003 12:53:31.914025 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-fcf5t" podUID="9725f594-2bef-441e-9407-8712be581aa9" Oct 03 12:53:32 crc kubenswrapper[4868]: I1003 12:53:32.145292 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:53:32 crc kubenswrapper[4868]: I1003 12:53:32.145350 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:53:32 crc kubenswrapper[4868]: I1003 12:53:32.145392 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:53:32 crc kubenswrapper[4868]: I1003 12:53:32.146325 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 12:53:32 crc kubenswrapper[4868]: I1003 12:53:32.146519 4868 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea" gracePeriod=600 Oct 03 12:53:33 crc kubenswrapper[4868]: I1003 12:53:33.054880 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea" exitCode=0 Oct 03 12:53:33 crc kubenswrapper[4868]: I1003 12:53:33.054939 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea"} Oct 03 12:53:36 crc kubenswrapper[4868]: E1003 12:53:36.624376 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-fcf5t" podUID="9725f594-2bef-441e-9407-8712be581aa9" Oct 03 12:53:41 crc kubenswrapper[4868]: E1003 12:53:41.683876 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 12:53:41 crc kubenswrapper[4868]: E1003 12:53:41.684350 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8jz75,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-9gtqf_openshift-marketplace(c39a6e17-1832-4321-a2a1-35adc3dd841b): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:41 crc kubenswrapper[4868]: E1003 12:53:41.686409 4868 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-9gtqf" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" Oct 03 12:53:42 crc kubenswrapper[4868]: E1003 12:53:42.230568 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Oct 03 12:53:42 crc kubenswrapper[4868]: E1003 12:53:42.231116 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pt2lf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-qw72n_openshift-marketplace(3bd6783c-cafd-4db5-b6df-ae558e765bd4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:42 crc kubenswrapper[4868]: E1003 12:53:42.232466 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-qw72n" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" Oct 03 12:53:42 crc kubenswrapper[4868]: E1003 12:53:42.462285 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-9gtqf" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" Oct 03 12:53:43 crc kubenswrapper[4868]: E1003 12:53:43.001281 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 12:53:43 crc kubenswrapper[4868]: E1003 12:53:43.001533 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fxpbb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-q8vcv_openshift-marketplace(f74dd718-f443-4005-a447-f2384a2f218d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:43 crc kubenswrapper[4868]: E1003 12:53:43.003790 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-q8vcv" podUID="f74dd718-f443-4005-a447-f2384a2f218d" Oct 03 12:53:47 crc kubenswrapper[4868]: E1003 12:53:47.661986 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Oct 03 12:53:47 crc kubenswrapper[4868]: E1003 12:53:47.662421 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-g5kjj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-wch8d_openshift-marketplace(1d93bd3f-1d60-4b09-9e89-4db1174876c1): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:47 crc kubenswrapper[4868]: E1003 12:53:47.664360 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-wch8d" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" Oct 03 12:53:50 crc kubenswrapper[4868]: E1003 12:53:50.411717 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-q8vcv" podUID="f74dd718-f443-4005-a447-f2384a2f218d" Oct 03 12:53:50 crc kubenswrapper[4868]: E1003 12:53:50.412180 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-wch8d" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" Oct 03 12:53:50 crc kubenswrapper[4868]: E1003 12:53:50.412278 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-qw72n" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.130411 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.130892 4868 kuberuntime_manager.go:1274] 
"Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jtqlp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7d8ll_openshift-marketplace(c739901a-792b-4b7b-958d-5fcba129ff22): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.132119 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7d8ll" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.140256 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7d8ll" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.150298 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.150492 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rtxhx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-729j7_openshift-marketplace(c0e0771c-be02-4e3d-933d-707dc8a76351): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Oct 03 12:53:51 crc kubenswrapper[4868]: E1003 12:53:51.151605 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-729j7" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" Oct 03 12:53:52 crc kubenswrapper[4868]: I1003 12:53:52.145440 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5"} Oct 03 12:53:52 crc kubenswrapper[4868]: I1003 12:53:52.148460 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerStarted","Data":"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"} Oct 03 12:53:52 crc kubenswrapper[4868]: I1003 12:53:52.150456 4868 generic.go:334] "Generic (PLEG): container finished" podID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerID="dde0aff41931995e9dd929375ff344d3d102d9315d249cb59f339e02419ed5f3" exitCode=0 Oct 03 12:53:52 crc kubenswrapper[4868]: I1003 12:53:52.150547 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerDied","Data":"dde0aff41931995e9dd929375ff344d3d102d9315d249cb59f339e02419ed5f3"} Oct 03 12:53:52 crc kubenswrapper[4868]: E1003 12:53:52.151771 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-729j7" 
podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.168980 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerStarted","Data":"9b9eacbe36b451a033e7fe810ad0afaa506f13b52f88569261cf6117f69e289d"} Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.173814 4868 generic.go:334] "Generic (PLEG): container finished" podID="9725f594-2bef-441e-9407-8712be581aa9" containerID="9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2" exitCode=0 Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.174138 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerDied","Data":"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"} Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.187426 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-clvll" podStartSLOduration=2.112866101 podStartE2EDuration="1m10.187406034s" podCreationTimestamp="2025-10-03 12:52:43 +0000 UTC" firstStartedPulling="2025-10-03 12:52:44.640565515 +0000 UTC m=+160.850414581" lastFinishedPulling="2025-10-03 12:53:52.715105448 +0000 UTC m=+228.924954514" observedRunningTime="2025-10-03 12:53:53.186317122 +0000 UTC m=+229.396166198" watchObservedRunningTime="2025-10-03 12:53:53.187406034 +0000 UTC m=+229.397255100" Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.724607 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:53:53 crc kubenswrapper[4868]: I1003 12:53:53.724908 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:53:54 crc kubenswrapper[4868]: I1003 12:53:54.180785 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerStarted","Data":"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"} Oct 03 12:53:54 crc kubenswrapper[4868]: I1003 12:53:54.199181 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-fcf5t" podStartSLOduration=3.188372327 podStartE2EDuration="1m12.199158923s" podCreationTimestamp="2025-10-03 12:52:42 +0000 UTC" firstStartedPulling="2025-10-03 12:52:44.628973894 +0000 UTC m=+160.838822970" lastFinishedPulling="2025-10-03 12:53:53.6397605 +0000 UTC m=+229.849609566" observedRunningTime="2025-10-03 12:53:54.196260797 +0000 UTC m=+230.406109863" watchObservedRunningTime="2025-10-03 12:53:54.199158923 +0000 UTC m=+230.409007999" Oct 03 12:53:54 crc kubenswrapper[4868]: I1003 12:53:54.875405 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-clvll" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="registry-server" probeResult="failure" output=< Oct 03 12:53:54 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 12:53:54 crc kubenswrapper[4868]: > Oct 03 12:53:57 crc kubenswrapper[4868]: I1003 12:53:57.196494 4868 generic.go:334] "Generic (PLEG): container finished" podID="c39a6e17-1832-4321-a2a1-35adc3dd841b" 
containerID="d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57" exitCode=0 Oct 03 12:53:57 crc kubenswrapper[4868]: I1003 12:53:57.196735 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerDied","Data":"d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57"} Oct 03 12:53:58 crc kubenswrapper[4868]: I1003 12:53:58.205246 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerStarted","Data":"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"} Oct 03 12:53:58 crc kubenswrapper[4868]: I1003 12:53:58.223170 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9gtqf" podStartSLOduration=5.003870282 podStartE2EDuration="1m19.223150019s" podCreationTimestamp="2025-10-03 12:52:39 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.481544786 +0000 UTC m=+159.691393852" lastFinishedPulling="2025-10-03 12:53:57.700824523 +0000 UTC m=+233.910673589" observedRunningTime="2025-10-03 12:53:58.221361697 +0000 UTC m=+234.431210783" watchObservedRunningTime="2025-10-03 12:53:58.223150019 +0000 UTC m=+234.432999085" Oct 03 12:54:00 crc kubenswrapper[4868]: I1003 12:54:00.362754 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9gtqf" Oct 03 12:54:00 crc kubenswrapper[4868]: I1003 12:54:00.363102 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9gtqf" Oct 03 12:54:00 crc kubenswrapper[4868]: I1003 12:54:00.421167 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9gtqf" Oct 03 12:54:03 crc kubenswrapper[4868]: I1003 12:54:03.411512 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-fcf5t" Oct 03 12:54:03 crc kubenswrapper[4868]: I1003 12:54:03.411907 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-fcf5t" Oct 03 12:54:03 crc kubenswrapper[4868]: I1003 12:54:03.468175 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-fcf5t" Oct 03 12:54:03 crc kubenswrapper[4868]: I1003 12:54:03.779880 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:54:03 crc kubenswrapper[4868]: I1003 12:54:03.820198 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:54:04 crc kubenswrapper[4868]: I1003 12:54:04.268694 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-fcf5t" Oct 03 12:54:04 crc kubenswrapper[4868]: I1003 12:54:04.702625 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-clvll"] Oct 03 12:54:05 crc kubenswrapper[4868]: I1003 12:54:05.238949 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-clvll" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="registry-server" 
containerID="cri-o://9b9eacbe36b451a033e7fe810ad0afaa506f13b52f88569261cf6117f69e289d" gracePeriod=2 Oct 03 12:54:07 crc kubenswrapper[4868]: I1003 12:54:07.250203 4868 generic.go:334] "Generic (PLEG): container finished" podID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerID="9b9eacbe36b451a033e7fe810ad0afaa506f13b52f88569261cf6117f69e289d" exitCode=0 Oct 03 12:54:07 crc kubenswrapper[4868]: I1003 12:54:07.250269 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerDied","Data":"9b9eacbe36b451a033e7fe810ad0afaa506f13b52f88569261cf6117f69e289d"} Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.262139 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-clvll" event={"ID":"c8bc4ae7-ab51-41fd-b455-acbf54499fde","Type":"ContainerDied","Data":"68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0"} Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.262534 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="68c3f5e13404056b367c7f5e9f3b657c3178e2f54a56f9056ab2021f6a4eecf0" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.277478 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.386411 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content\") pod \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.386479 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vp66n\" (UniqueName: \"kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n\") pod \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.386515 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities\") pod \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\" (UID: \"c8bc4ae7-ab51-41fd-b455-acbf54499fde\") " Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.387656 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities" (OuterVolumeSpecName: "utilities") pod "c8bc4ae7-ab51-41fd-b455-acbf54499fde" (UID: "c8bc4ae7-ab51-41fd-b455-acbf54499fde"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.394469 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n" (OuterVolumeSpecName: "kube-api-access-vp66n") pod "c8bc4ae7-ab51-41fd-b455-acbf54499fde" (UID: "c8bc4ae7-ab51-41fd-b455-acbf54499fde"). InnerVolumeSpecName "kube-api-access-vp66n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.468270 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c8bc4ae7-ab51-41fd-b455-acbf54499fde" (UID: "c8bc4ae7-ab51-41fd-b455-acbf54499fde"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.488527 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.488576 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vp66n\" (UniqueName: \"kubernetes.io/projected/c8bc4ae7-ab51-41fd-b455-acbf54499fde-kube-api-access-vp66n\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:09 crc kubenswrapper[4868]: I1003 12:54:09.488594 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c8bc4ae7-ab51-41fd-b455-acbf54499fde-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.268121 4868 generic.go:334] "Generic (PLEG): container finished" podID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerID="15f590b272a712d84f2ca2e6a807693cf686b6836a933b51666aa49be1d7db8b" exitCode=0 Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.268251 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerDied","Data":"15f590b272a712d84f2ca2e6a807693cf686b6836a933b51666aa49be1d7db8b"} Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.273812 4868 generic.go:334] "Generic (PLEG): container finished" podID="f74dd718-f443-4005-a447-f2384a2f218d" containerID="efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d" exitCode=0 Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.273970 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerDied","Data":"efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d"} Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.282634 4868 generic.go:334] "Generic (PLEG): container finished" podID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerID="679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe" exitCode=0 Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.282722 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerDied","Data":"679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe"} Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.287578 4868 generic.go:334] "Generic (PLEG): container finished" podID="c739901a-792b-4b7b-958d-5fcba129ff22" containerID="0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b" exitCode=0 Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.287619 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" 
event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerDied","Data":"0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b"} Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.294122 4868 generic.go:334] "Generic (PLEG): container finished" podID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerID="6b56401f80176bb850141ede20fea3b2fa279fa3a32f55e39603fe5493e294cd" exitCode=0 Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.294226 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-clvll" Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.294527 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerDied","Data":"6b56401f80176bb850141ede20fea3b2fa279fa3a32f55e39603fe5493e294cd"} Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.337036 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-clvll"] Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.340709 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-clvll"] Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.401866 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9gtqf" Oct 03 12:54:10 crc kubenswrapper[4868]: I1003 12:54:10.551391 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" path="/var/lib/kubelet/pods/c8bc4ae7-ab51-41fd-b455-acbf54499fde/volumes" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.305958 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerStarted","Data":"f7fa95d3dffd4eb637b7630f7f7525aef7fd7cf556367205ed1548078b12479f"} Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.308606 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerStarted","Data":"2f179ae40fb6c756ea6effd0f1a38c845b61814b2fb3a5b02ad596b1a4b53a6b"} Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.311704 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerStarted","Data":"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"} Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.314180 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerStarted","Data":"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab"} Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.316219 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerStarted","Data":"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"} Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.333377 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wch8d" podStartSLOduration=5.093177386 
podStartE2EDuration="1m33.333353155s" podCreationTimestamp="2025-10-03 12:52:39 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.509867145 +0000 UTC m=+159.719716211" lastFinishedPulling="2025-10-03 12:54:11.750042914 +0000 UTC m=+247.959891980" observedRunningTime="2025-10-03 12:54:12.329896963 +0000 UTC m=+248.539746029" watchObservedRunningTime="2025-10-03 12:54:12.333353155 +0000 UTC m=+248.543202221" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.352248 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.352307 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.370971 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-729j7" podStartSLOduration=3.391939808 podStartE2EDuration="1m31.370952557s" podCreationTimestamp="2025-10-03 12:52:41 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.46638561 +0000 UTC m=+159.676234676" lastFinishedPulling="2025-10-03 12:54:11.445398359 +0000 UTC m=+247.655247425" observedRunningTime="2025-10-03 12:54:12.352419399 +0000 UTC m=+248.562268465" watchObservedRunningTime="2025-10-03 12:54:12.370952557 +0000 UTC m=+248.580801623" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.393644 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-qw72n" podStartSLOduration=4.366084197 podStartE2EDuration="1m32.393623998s" podCreationTimestamp="2025-10-03 12:52:40 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.525794232 +0000 UTC m=+159.735643298" lastFinishedPulling="2025-10-03 12:54:11.553334033 +0000 UTC m=+247.763183099" observedRunningTime="2025-10-03 12:54:12.372865804 +0000 UTC m=+248.582714870" watchObservedRunningTime="2025-10-03 12:54:12.393623998 +0000 UTC m=+248.603473064" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.432749 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-q8vcv" podStartSLOduration=5.535937435 podStartE2EDuration="1m33.432732145s" podCreationTimestamp="2025-10-03 12:52:39 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.625229207 +0000 UTC m=+159.835078273" lastFinishedPulling="2025-10-03 12:54:11.522023917 +0000 UTC m=+247.731872983" observedRunningTime="2025-10-03 12:54:12.396591986 +0000 UTC m=+248.606441052" watchObservedRunningTime="2025-10-03 12:54:12.432732145 +0000 UTC m=+248.642581211" Oct 03 12:54:12 crc kubenswrapper[4868]: I1003 12:54:12.433072 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7d8ll" podStartSLOduration=3.318273268 podStartE2EDuration="1m31.433068505s" podCreationTimestamp="2025-10-03 12:52:41 +0000 UTC" firstStartedPulling="2025-10-03 12:52:43.531365852 +0000 UTC m=+159.741214918" lastFinishedPulling="2025-10-03 12:54:11.646161089 +0000 UTC m=+247.856010155" observedRunningTime="2025-10-03 12:54:12.43120445 +0000 UTC m=+248.641053516" watchObservedRunningTime="2025-10-03 12:54:12.433068505 +0000 UTC m=+248.642917571" Oct 03 12:54:13 crc kubenswrapper[4868]: I1003 12:54:13.411302 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-729j7" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" 
containerName="registry-server" probeResult="failure" output=< Oct 03 12:54:13 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 12:54:13 crc kubenswrapper[4868]: > Oct 03 12:54:19 crc kubenswrapper[4868]: I1003 12:54:19.842990 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-q8vcv" Oct 03 12:54:19 crc kubenswrapper[4868]: I1003 12:54:19.844780 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-q8vcv" Oct 03 12:54:19 crc kubenswrapper[4868]: I1003 12:54:19.887618 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-q8vcv" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.401497 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-q8vcv" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.947107 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.947170 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.955016 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.955040 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.987886 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:20 crc kubenswrapper[4868]: I1003 12:54:20.991574 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:21 crc kubenswrapper[4868]: I1003 12:54:21.400990 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:21 crc kubenswrapper[4868]: I1003 12:54:21.403252 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:21 crc kubenswrapper[4868]: I1003 12:54:21.875087 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7d8ll" Oct 03 12:54:21 crc kubenswrapper[4868]: I1003 12:54:21.875144 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7d8ll" Oct 03 12:54:21 crc kubenswrapper[4868]: I1003 12:54:21.911952 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7d8ll" Oct 03 12:54:22 crc kubenswrapper[4868]: I1003 12:54:22.389671 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:22 crc kubenswrapper[4868]: I1003 12:54:22.427220 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7d8ll" Oct 03 12:54:22 crc kubenswrapper[4868]: I1003 12:54:22.436072 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:23 crc kubenswrapper[4868]: I1003 12:54:23.105846 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qw72n"] Oct 03 12:54:23 crc kubenswrapper[4868]: I1003 12:54:23.301962 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wch8d"] Oct 03 12:54:23 crc kubenswrapper[4868]: I1003 12:54:23.377862 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-qw72n" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="registry-server" containerID="cri-o://2f179ae40fb6c756ea6effd0f1a38c845b61814b2fb3a5b02ad596b1a4b53a6b" gracePeriod=2 Oct 03 12:54:23 crc kubenswrapper[4868]: I1003 12:54:23.378275 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wch8d" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="registry-server" containerID="cri-o://f7fa95d3dffd4eb637b7630f7f7525aef7fd7cf556367205ed1548078b12479f" gracePeriod=2 Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.412443 4868 generic.go:334] "Generic (PLEG): container finished" podID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerID="f7fa95d3dffd4eb637b7630f7f7525aef7fd7cf556367205ed1548078b12479f" exitCode=0 Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.413127 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerDied","Data":"f7fa95d3dffd4eb637b7630f7f7525aef7fd7cf556367205ed1548078b12479f"} Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.422963 4868 generic.go:334] "Generic (PLEG): container finished" podID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerID="2f179ae40fb6c756ea6effd0f1a38c845b61814b2fb3a5b02ad596b1a4b53a6b" exitCode=0 Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.423014 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerDied","Data":"2f179ae40fb6c756ea6effd0f1a38c845b61814b2fb3a5b02ad596b1a4b53a6b"} Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.524970 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.637390 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.680682 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content\") pod \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.680731 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pt2lf\" (UniqueName: \"kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf\") pod \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.680781 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities\") pod \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\" (UID: \"3bd6783c-cafd-4db5-b6df-ae558e765bd4\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.681656 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities" (OuterVolumeSpecName: "utilities") pod "3bd6783c-cafd-4db5-b6df-ae558e765bd4" (UID: "3bd6783c-cafd-4db5-b6df-ae558e765bd4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.686321 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf" (OuterVolumeSpecName: "kube-api-access-pt2lf") pod "3bd6783c-cafd-4db5-b6df-ae558e765bd4" (UID: "3bd6783c-cafd-4db5-b6df-ae558e765bd4"). InnerVolumeSpecName "kube-api-access-pt2lf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.735635 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3bd6783c-cafd-4db5-b6df-ae558e765bd4" (UID: "3bd6783c-cafd-4db5-b6df-ae558e765bd4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.781999 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content\") pod \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.782116 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g5kjj\" (UniqueName: \"kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj\") pod \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.782166 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities\") pod \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\" (UID: \"1d93bd3f-1d60-4b09-9e89-4db1174876c1\") " Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.782451 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.782472 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pt2lf\" (UniqueName: \"kubernetes.io/projected/3bd6783c-cafd-4db5-b6df-ae558e765bd4-kube-api-access-pt2lf\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.782487 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3bd6783c-cafd-4db5-b6df-ae558e765bd4-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.783019 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities" (OuterVolumeSpecName: "utilities") pod "1d93bd3f-1d60-4b09-9e89-4db1174876c1" (UID: "1d93bd3f-1d60-4b09-9e89-4db1174876c1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.785594 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj" (OuterVolumeSpecName: "kube-api-access-g5kjj") pod "1d93bd3f-1d60-4b09-9e89-4db1174876c1" (UID: "1d93bd3f-1d60-4b09-9e89-4db1174876c1"). InnerVolumeSpecName "kube-api-access-g5kjj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.824860 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d93bd3f-1d60-4b09-9e89-4db1174876c1" (UID: "1d93bd3f-1d60-4b09-9e89-4db1174876c1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.883247 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g5kjj\" (UniqueName: \"kubernetes.io/projected/1d93bd3f-1d60-4b09-9e89-4db1174876c1-kube-api-access-g5kjj\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.883277 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:24 crc kubenswrapper[4868]: I1003 12:54:24.883287 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d93bd3f-1d60-4b09-9e89-4db1174876c1-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.429796 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wch8d" event={"ID":"1d93bd3f-1d60-4b09-9e89-4db1174876c1","Type":"ContainerDied","Data":"6497de826bbb1905a99fa12fed9bf073c61829ab4085fd41a15f0ded7bc0f8cc"} Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.430347 4868 scope.go:117] "RemoveContainer" containerID="f7fa95d3dffd4eb637b7630f7f7525aef7fd7cf556367205ed1548078b12479f" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.429826 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wch8d" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.432128 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-qw72n" event={"ID":"3bd6783c-cafd-4db5-b6df-ae558e765bd4","Type":"ContainerDied","Data":"254c7bb820fc15ba1f6b562245fd48258ab1b73cd67fe6171894408b5b533718"} Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.432172 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-qw72n" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.452139 4868 scope.go:117] "RemoveContainer" containerID="6b56401f80176bb850141ede20fea3b2fa279fa3a32f55e39603fe5493e294cd" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.460699 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wch8d"] Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.464734 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wch8d"] Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.478586 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-qw72n"] Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.482661 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-qw72n"] Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.491005 4868 scope.go:117] "RemoveContainer" containerID="7505de666c7700d063890d3ef278f14389978e29ccdead4441b6a3d315590499" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.504448 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"] Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.504651 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-729j7" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="registry-server" containerID="cri-o://8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab" gracePeriod=2 Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.505282 4868 scope.go:117] "RemoveContainer" containerID="2f179ae40fb6c756ea6effd0f1a38c845b61814b2fb3a5b02ad596b1a4b53a6b" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.524911 4868 scope.go:117] "RemoveContainer" containerID="15f590b272a712d84f2ca2e6a807693cf686b6836a933b51666aa49be1d7db8b" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.539697 4868 scope.go:117] "RemoveContainer" containerID="e85a766f1ae65e5a11727a86b202493e6cbf01ffc208561472c220e9163c0400" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.845880 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.996095 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content\") pod \"c0e0771c-be02-4e3d-933d-707dc8a76351\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.996143 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtxhx\" (UniqueName: \"kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx\") pod \"c0e0771c-be02-4e3d-933d-707dc8a76351\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.996330 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities\") pod \"c0e0771c-be02-4e3d-933d-707dc8a76351\" (UID: \"c0e0771c-be02-4e3d-933d-707dc8a76351\") " Oct 03 12:54:25 crc kubenswrapper[4868]: I1003 12:54:25.997602 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities" (OuterVolumeSpecName: "utilities") pod "c0e0771c-be02-4e3d-933d-707dc8a76351" (UID: "c0e0771c-be02-4e3d-933d-707dc8a76351"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.005486 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx" (OuterVolumeSpecName: "kube-api-access-rtxhx") pod "c0e0771c-be02-4e3d-933d-707dc8a76351" (UID: "c0e0771c-be02-4e3d-933d-707dc8a76351"). InnerVolumeSpecName "kube-api-access-rtxhx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.024541 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0e0771c-be02-4e3d-933d-707dc8a76351" (UID: "c0e0771c-be02-4e3d-933d-707dc8a76351"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.097679 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.097708 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0e0771c-be02-4e3d-933d-707dc8a76351-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.097718 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtxhx\" (UniqueName: \"kubernetes.io/projected/c0e0771c-be02-4e3d-933d-707dc8a76351-kube-api-access-rtxhx\") on node \"crc\" DevicePath \"\"" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.441993 4868 generic.go:334] "Generic (PLEG): container finished" podID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerID="8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab" exitCode=0 Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.442029 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-729j7" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.442037 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerDied","Data":"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab"} Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.442090 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-729j7" event={"ID":"c0e0771c-be02-4e3d-933d-707dc8a76351","Type":"ContainerDied","Data":"82370337aa45a78f59937c3502e83b74c63ae61df5549425d5842a7e7186ced4"} Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.442108 4868 scope.go:117] "RemoveContainer" containerID="8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.468351 4868 scope.go:117] "RemoveContainer" containerID="679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.468858 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"] Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.471199 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-729j7"] Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.489541 4868 scope.go:117] "RemoveContainer" containerID="1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.503960 4868 scope.go:117] "RemoveContainer" containerID="8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab" Oct 03 12:54:26 crc kubenswrapper[4868]: E1003 12:54:26.504399 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab\": container with ID starting with 8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab not found: ID does not exist" containerID="8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.504431 4868 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab"} err="failed to get container status \"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab\": rpc error: code = NotFound desc = could not find container \"8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab\": container with ID starting with 8d31904707833de0a1d75a2348c238a6c293e27d0242f28cbbb597f9d0b3a5ab not found: ID does not exist" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.504452 4868 scope.go:117] "RemoveContainer" containerID="679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe" Oct 03 12:54:26 crc kubenswrapper[4868]: E1003 12:54:26.504744 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe\": container with ID starting with 679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe not found: ID does not exist" containerID="679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.504870 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe"} err="failed to get container status \"679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe\": rpc error: code = NotFound desc = could not find container \"679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe\": container with ID starting with 679eaf35c069cee8944ff89cf447531af442705448bc2fdfd0d9e6d81a8213fe not found: ID does not exist" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.504994 4868 scope.go:117] "RemoveContainer" containerID="1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55" Oct 03 12:54:26 crc kubenswrapper[4868]: E1003 12:54:26.505681 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55\": container with ID starting with 1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55 not found: ID does not exist" containerID="1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.505728 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55"} err="failed to get container status \"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55\": rpc error: code = NotFound desc = could not find container \"1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55\": container with ID starting with 1338943823aba9de43b24913847f0116d03aeecf57dad8a9fc2ede916c956a55 not found: ID does not exist" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.572344 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" path="/var/lib/kubelet/pods/1d93bd3f-1d60-4b09-9e89-4db1174876c1/volumes" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 12:54:26.573232 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" path="/var/lib/kubelet/pods/3bd6783c-cafd-4db5-b6df-ae558e765bd4/volumes" Oct 03 12:54:26 crc kubenswrapper[4868]: I1003 
12:54:26.573776 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" path="/var/lib/kubelet/pods/c0e0771c-be02-4e3d-933d-707dc8a76351/volumes" Oct 03 12:54:36 crc kubenswrapper[4868]: I1003 12:54:36.800480 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"] Oct 03 12:55:01 crc kubenswrapper[4868]: I1003 12:55:01.832293 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerName="oauth-openshift" containerID="cri-o://7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811" gracePeriod=15 Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.155922 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188163 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-xsc74"] Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188345 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188356 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188366 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188372 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188380 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188386 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188395 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188400 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188409 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188415 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188422 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188427 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="extract-utilities" Oct 03 
12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188434 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188439 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188446 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c3417c9-6ea6-45ed-b593-3a8cea5001a6" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188452 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c3417c9-6ea6-45ed-b593-3a8cea5001a6" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188458 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188463 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188472 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerName="oauth-openshift" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188477 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerName="oauth-openshift" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188486 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188491 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="extract-utilities" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188501 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188507 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188520 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="11294c4c-3b04-497e-acdb-1e58d9af49d2" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188527 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="11294c4c-3b04-497e-acdb-1e58d9af49d2" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188535 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188541 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.188548 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="extract-content" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188553 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="extract-content" Oct 03 12:55:02 crc 
kubenswrapper[4868]: I1003 12:55:02.188631 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d93bd3f-1d60-4b09-9e89-4db1174876c1" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188641 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="11294c4c-3b04-497e-acdb-1e58d9af49d2" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188650 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerName="oauth-openshift" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188662 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c8bc4ae7-ab51-41fd-b455-acbf54499fde" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188669 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="3bd6783c-cafd-4db5-b6df-ae558e765bd4" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188679 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0e0771c-be02-4e3d-933d-707dc8a76351" containerName="registry-server" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.188687 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c3417c9-6ea6-45ed-b593-3a8cea5001a6" containerName="pruner" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.189037 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.200939 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-xsc74"] Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287588 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287670 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287718 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287756 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287786 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287829 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mk88n\" (UniqueName: \"kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287871 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287894 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287928 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287950 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287970 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.287993 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288012 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig\") pod \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288027 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir\") pod 
\"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\" (UID: \"5be75ac0-b221-4c59-b9c0-67d4b77f7d86\") " Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288284 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288310 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288335 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-dir\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288357 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288383 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6r8r\" (UniqueName: \"kubernetes.io/projected/e4560671-1aed-4308-8725-4aa4bf05e90f-kube-api-access-q6r8r\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288408 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288425 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288439 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288460 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288494 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288509 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-policies\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288529 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288555 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288577 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.288966 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.289284 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.289703 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.289834 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.289860 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.294434 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.294695 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.294928 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.295722 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.296452 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.297154 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.297243 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.297365 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.302348 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n" (OuterVolumeSpecName: "kube-api-access-mk88n") pod "5be75ac0-b221-4c59-b9c0-67d4b77f7d86" (UID: "5be75ac0-b221-4c59-b9c0-67d4b77f7d86"). InnerVolumeSpecName "kube-api-access-mk88n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390351 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390412 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-dir\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390437 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390462 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6r8r\" (UniqueName: \"kubernetes.io/projected/e4560671-1aed-4308-8725-4aa4bf05e90f-kube-api-access-q6r8r\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390486 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390503 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390520 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390539 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " 
pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390571 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390594 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-policies\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390613 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390635 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390654 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390715 4868 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-policies\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390726 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390746 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-service-ca\") on node \"crc\" DevicePath 
\"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390755 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390765 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mk88n\" (UniqueName: \"kubernetes.io/projected/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-kube-api-access-mk88n\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390774 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390785 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390794 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390804 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390814 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390824 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390833 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390842 4868 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-audit-dir\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.390852 4868 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/5be75ac0-b221-4c59-b9c0-67d4b77f7d86-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.391792 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-policies\") pod 
\"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.392288 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-service-ca\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.392311 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.392980 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/e4560671-1aed-4308-8725-4aa4bf05e90f-audit-dir\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.393524 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396670 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-error\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396673 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-router-certs\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396790 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396853 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: 
\"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396856 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.396934 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-template-login\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.397324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-system-session\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.397592 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/e4560671-1aed-4308-8725-4aa4bf05e90f-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.407623 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6r8r\" (UniqueName: \"kubernetes.io/projected/e4560671-1aed-4308-8725-4aa4bf05e90f-kube-api-access-q6r8r\") pod \"oauth-openshift-7b49777cd7-xsc74\" (UID: \"e4560671-1aed-4308-8725-4aa4bf05e90f\") " pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.513082 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.619876 4868 generic.go:334] "Generic (PLEG): container finished" podID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" containerID="7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811" exitCode=0 Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.619921 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" event={"ID":"5be75ac0-b221-4c59-b9c0-67d4b77f7d86","Type":"ContainerDied","Data":"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811"} Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.619950 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" event={"ID":"5be75ac0-b221-4c59-b9c0-67d4b77f7d86","Type":"ContainerDied","Data":"ac8db360141b2c80e4c2e4fcae9e611554602ddc7ea317f5fbbb2b119f538ce7"} Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.619970 4868 scope.go:117] "RemoveContainer" containerID="7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.620107 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-cdsl9" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.646697 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"] Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.649719 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-cdsl9"] Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.651483 4868 scope.go:117] "RemoveContainer" containerID="7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811" Oct 03 12:55:02 crc kubenswrapper[4868]: E1003 12:55:02.651918 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811\": container with ID starting with 7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811 not found: ID does not exist" containerID="7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.651962 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811"} err="failed to get container status \"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811\": rpc error: code = NotFound desc = could not find container \"7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811\": container with ID starting with 7a5fb025f394e6939360ad56427df8a11208b5f78b2966889453239a7a66a811 not found: ID does not exist" Oct 03 12:55:02 crc kubenswrapper[4868]: I1003 12:55:02.887683 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7b49777cd7-xsc74"] Oct 03 12:55:03 crc kubenswrapper[4868]: I1003 12:55:03.627781 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" event={"ID":"e4560671-1aed-4308-8725-4aa4bf05e90f","Type":"ContainerStarted","Data":"f5b71259629536fd35c769abef7f63ab1f00eb9e201fc0e70acf419045b1521b"} Oct 03 12:55:03 crc kubenswrapper[4868]: 
I1003 12:55:03.627830 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" event={"ID":"e4560671-1aed-4308-8725-4aa4bf05e90f","Type":"ContainerStarted","Data":"f3c57fd4739648031190df1f16d27b9e769e1706bb5579342b97ca612f6c9bb3"} Oct 03 12:55:03 crc kubenswrapper[4868]: I1003 12:55:03.628124 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:03 crc kubenswrapper[4868]: I1003 12:55:03.636630 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" Oct 03 12:55:03 crc kubenswrapper[4868]: I1003 12:55:03.650556 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7b49777cd7-xsc74" podStartSLOduration=27.650540343 podStartE2EDuration="27.650540343s" podCreationTimestamp="2025-10-03 12:54:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:55:03.649487193 +0000 UTC m=+299.859336259" watchObservedRunningTime="2025-10-03 12:55:03.650540343 +0000 UTC m=+299.860389409" Oct 03 12:55:04 crc kubenswrapper[4868]: I1003 12:55:04.550746 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5be75ac0-b221-4c59-b9c0-67d4b77f7d86" path="/var/lib/kubelet/pods/5be75ac0-b221-4c59-b9c0-67d4b77f7d86/volumes" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.162976 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.164354 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-q8vcv" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="registry-server" containerID="cri-o://1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535" gracePeriod=30 Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.178440 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9gtqf"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.178816 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9gtqf" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="registry-server" containerID="cri-o://3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf" gracePeriod=30 Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.194319 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.194765 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" containerID="cri-o://0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3" gracePeriod=30 Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.201277 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.201765 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7d8ll" 
podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="registry-server" containerID="cri-o://59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6" gracePeriod=30 Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.214203 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpjpr"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.215150 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.231212 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.231882 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-fcf5t" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="registry-server" containerID="cri-o://ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc" gracePeriod=30 Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.234824 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpjpr"] Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.352303 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f135c25-3c52-475a-9833-042496477d82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.352378 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wjk9\" (UniqueName: \"kubernetes.io/projected/5f135c25-3c52-475a-9833-042496477d82-kube-api-access-6wjk9\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.352423 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5f135c25-3c52-475a-9833-042496477d82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.453800 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5f135c25-3c52-475a-9833-042496477d82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.453839 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wjk9\" (UniqueName: \"kubernetes.io/projected/5f135c25-3c52-475a-9833-042496477d82-kube-api-access-6wjk9\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" Oct 03 12:55:32 crc 
kubenswrapper[4868]: I1003 12:55:32.453951 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f135c25-3c52-475a-9833-042496477d82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.455472 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5f135c25-3c52-475a-9833-042496477d82-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.467998 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/5f135c25-3c52-475a-9833-042496477d82-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.472488 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wjk9\" (UniqueName: \"kubernetes.io/projected/5f135c25-3c52-475a-9833-042496477d82-kube-api-access-6wjk9\") pod \"marketplace-operator-79b997595-rpjpr\" (UID: \"5f135c25-3c52-475a-9833-042496477d82\") " pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.594170 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.595629 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.609660 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.637473 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.644739 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.677261 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769007 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics\") pod \"346d964e-9d9d-4175-9828-ba55c3c31778\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769133 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content\") pod \"9725f594-2bef-441e-9407-8712be581aa9\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769170 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8jz75\" (UniqueName: \"kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75\") pod \"c39a6e17-1832-4321-a2a1-35adc3dd841b\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769203 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities\") pod \"f74dd718-f443-4005-a447-f2384a2f218d\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769232 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content\") pod \"f74dd718-f443-4005-a447-f2384a2f218d\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769248 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxpbb\" (UniqueName: \"kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb\") pod \"f74dd718-f443-4005-a447-f2384a2f218d\" (UID: \"f74dd718-f443-4005-a447-f2384a2f218d\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769273 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities\") pod \"c739901a-792b-4b7b-958d-5fcba129ff22\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769290 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities\") pod \"9725f594-2bef-441e-9407-8712be581aa9\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769306 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content\") pod \"c39a6e17-1832-4321-a2a1-35adc3dd841b\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769323 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities\") pod \"c39a6e17-1832-4321-a2a1-35adc3dd841b\" (UID: \"c39a6e17-1832-4321-a2a1-35adc3dd841b\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769341 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content\") pod \"c739901a-792b-4b7b-958d-5fcba129ff22\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769370 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d89dh\" (UniqueName: \"kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh\") pod \"9725f594-2bef-441e-9407-8712be581aa9\" (UID: \"9725f594-2bef-441e-9407-8712be581aa9\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769395 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtqlp\" (UniqueName: \"kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp\") pod \"c739901a-792b-4b7b-958d-5fcba129ff22\" (UID: \"c739901a-792b-4b7b-958d-5fcba129ff22\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769412 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wgdw5\" (UniqueName: \"kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5\") pod \"346d964e-9d9d-4175-9828-ba55c3c31778\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.769432 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca\") pod \"346d964e-9d9d-4175-9828-ba55c3c31778\" (UID: \"346d964e-9d9d-4175-9828-ba55c3c31778\") "
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.770990 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities" (OuterVolumeSpecName: "utilities") pod "c39a6e17-1832-4321-a2a1-35adc3dd841b" (UID: "c39a6e17-1832-4321-a2a1-35adc3dd841b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.771403 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities" (OuterVolumeSpecName: "utilities") pod "9725f594-2bef-441e-9407-8712be581aa9" (UID: "9725f594-2bef-441e-9407-8712be581aa9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.771825 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities" (OuterVolumeSpecName: "utilities") pod "f74dd718-f443-4005-a447-f2384a2f218d" (UID: "f74dd718-f443-4005-a447-f2384a2f218d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.771853 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "346d964e-9d9d-4175-9828-ba55c3c31778" (UID: "346d964e-9d9d-4175-9828-ba55c3c31778"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.771886 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities" (OuterVolumeSpecName: "utilities") pod "c739901a-792b-4b7b-958d-5fcba129ff22" (UID: "c739901a-792b-4b7b-958d-5fcba129ff22"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.774088 4868 generic.go:334] "Generic (PLEG): container finished" podID="c739901a-792b-4b7b-958d-5fcba129ff22" containerID="59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6" exitCode=0
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.774139 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerDied","Data":"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.774166 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7d8ll" event={"ID":"c739901a-792b-4b7b-958d-5fcba129ff22","Type":"ContainerDied","Data":"dab8721334ec9901c83db76f3dcbc5a3d534819ab172c2f6fb793df9dac8ebe5"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.774183 4868 scope.go:117] "RemoveContainer" containerID="59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.774299 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7d8ll"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.779640 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5" (OuterVolumeSpecName: "kube-api-access-wgdw5") pod "346d964e-9d9d-4175-9828-ba55c3c31778" (UID: "346d964e-9d9d-4175-9828-ba55c3c31778"). InnerVolumeSpecName "kube-api-access-wgdw5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.780042 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp" (OuterVolumeSpecName: "kube-api-access-jtqlp") pod "c739901a-792b-4b7b-958d-5fcba129ff22" (UID: "c739901a-792b-4b7b-958d-5fcba129ff22"). InnerVolumeSpecName "kube-api-access-jtqlp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.780226 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75" (OuterVolumeSpecName: "kube-api-access-8jz75") pod "c39a6e17-1832-4321-a2a1-35adc3dd841b" (UID: "c39a6e17-1832-4321-a2a1-35adc3dd841b"). InnerVolumeSpecName "kube-api-access-8jz75". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.782766 4868 generic.go:334] "Generic (PLEG): container finished" podID="9725f594-2bef-441e-9407-8712be581aa9" containerID="ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc" exitCode=0
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.782831 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerDied","Data":"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.782921 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-fcf5t"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.783149 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-fcf5t" event={"ID":"9725f594-2bef-441e-9407-8712be581aa9","Type":"ContainerDied","Data":"ba119737a55735a13642fe000ac063c0d2c94dd42a9f8bbae98b4615fb2bb730"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.786937 4868 generic.go:334] "Generic (PLEG): container finished" podID="346d964e-9d9d-4175-9828-ba55c3c31778" containerID="0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3" exitCode=0
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.787019 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.787286 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" event={"ID":"346d964e-9d9d-4175-9828-ba55c3c31778","Type":"ContainerDied","Data":"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.787316 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-qjpnr" event={"ID":"346d964e-9d9d-4175-9828-ba55c3c31778","Type":"ContainerDied","Data":"1e0a17adcd5a8103f8531b5fa6785d1805c9b7661e253b3ff28c5e217a9da554"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.787594 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "346d964e-9d9d-4175-9828-ba55c3c31778" (UID: "346d964e-9d9d-4175-9828-ba55c3c31778"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.788641 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb" (OuterVolumeSpecName: "kube-api-access-fxpbb") pod "f74dd718-f443-4005-a447-f2384a2f218d" (UID: "f74dd718-f443-4005-a447-f2384a2f218d"). InnerVolumeSpecName "kube-api-access-fxpbb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.789109 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh" (OuterVolumeSpecName: "kube-api-access-d89dh") pod "9725f594-2bef-441e-9407-8712be581aa9" (UID: "9725f594-2bef-441e-9407-8712be581aa9"). InnerVolumeSpecName "kube-api-access-d89dh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.791514 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c739901a-792b-4b7b-958d-5fcba129ff22" (UID: "c739901a-792b-4b7b-958d-5fcba129ff22"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.794799 4868 generic.go:334] "Generic (PLEG): container finished" podID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerID="3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf" exitCode=0
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.794858 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerDied","Data":"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.794879 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9gtqf" event={"ID":"c39a6e17-1832-4321-a2a1-35adc3dd841b","Type":"ContainerDied","Data":"4bbdc50fecbb16e9a138f25444f1cd6605ebe5783ade2a9c46c23952c7a3349b"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.794952 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9gtqf"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.801740 4868 generic.go:334] "Generic (PLEG): container finished" podID="f74dd718-f443-4005-a447-f2384a2f218d" containerID="1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535" exitCode=0
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.801766 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerDied","Data":"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.801788 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-q8vcv" event={"ID":"f74dd718-f443-4005-a447-f2384a2f218d","Type":"ContainerDied","Data":"ab11c3f591500723771acfca7c746e149f43663bdadccb724c87d98c66d60576"}
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.801825 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-q8vcv"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.807499 4868 scope.go:117] "RemoveContainer" containerID="0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.832987 4868 scope.go:117] "RemoveContainer" containerID="6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.836852 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f74dd718-f443-4005-a447-f2384a2f218d" (UID: "f74dd718-f443-4005-a447-f2384a2f218d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.846969 4868 scope.go:117] "RemoveContainer" containerID="59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.847380 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6\": container with ID starting with 59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6 not found: ID does not exist" containerID="59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.847416 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6"} err="failed to get container status \"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6\": rpc error: code = NotFound desc = could not find container \"59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6\": container with ID starting with 59ede7736eb337156a28613c935422a0019ac5f4188fe05461d63385714fd4f6 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.847444 4868 scope.go:117] "RemoveContainer" containerID="0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.847806 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b\": container with ID starting with 0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b not found: ID does not exist" containerID="0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.847886 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b"} err="failed to get container status \"0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b\": rpc error: code = NotFound desc = could not find container \"0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b\": container with ID starting with 0feb90028a5fe8175cf6f8a6a95ed02a71b8780f723a0e55379df77af7208e2b not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.847928 4868 scope.go:117] "RemoveContainer" containerID="6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.853827 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412\": container with ID starting with 6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412 not found: ID does not exist" containerID="6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.853868 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412"} err="failed to get container status \"6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412\": rpc error: code = NotFound desc = could not find container \"6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412\": container with ID starting with 6e78aee5a0ec6d59a5a5e430cbd4e837b1eba7896bbd96b9409d6a22cfc75412 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.853896 4868 scope.go:117] "RemoveContainer" containerID="ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.859083 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c39a6e17-1832-4321-a2a1-35adc3dd841b" (UID: "c39a6e17-1832-4321-a2a1-35adc3dd841b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.869638 4868 scope.go:117] "RemoveContainer" containerID="9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870281 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d89dh\" (UniqueName: \"kubernetes.io/projected/9725f594-2bef-441e-9407-8712be581aa9-kube-api-access-d89dh\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870310 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtqlp\" (UniqueName: \"kubernetes.io/projected/c739901a-792b-4b7b-958d-5fcba129ff22-kube-api-access-jtqlp\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870321 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wgdw5\" (UniqueName: \"kubernetes.io/projected/346d964e-9d9d-4175-9828-ba55c3c31778-kube-api-access-wgdw5\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870333 4868 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870344 4868 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/346d964e-9d9d-4175-9828-ba55c3c31778-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870358 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8jz75\" (UniqueName: \"kubernetes.io/projected/c39a6e17-1832-4321-a2a1-35adc3dd841b-kube-api-access-8jz75\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870371 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870385 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f74dd718-f443-4005-a447-f2384a2f218d-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870396 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxpbb\" (UniqueName: \"kubernetes.io/projected/f74dd718-f443-4005-a447-f2384a2f218d-kube-api-access-fxpbb\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870407 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870417 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870428 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870438 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c39a6e17-1832-4321-a2a1-35adc3dd841b-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.870451 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c739901a-792b-4b7b-958d-5fcba129ff22-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.877109 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9725f594-2bef-441e-9407-8712be581aa9" (UID: "9725f594-2bef-441e-9407-8712be581aa9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.884460 4868 scope.go:117] "RemoveContainer" containerID="229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.896187 4868 scope.go:117] "RemoveContainer" containerID="ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.896515 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc\": container with ID starting with ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc not found: ID does not exist" containerID="ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.896556 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc"} err="failed to get container status \"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc\": rpc error: code = NotFound desc = could not find container \"ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc\": container with ID starting with ad8fd1e4c5519c187d69cc22ed6094e7733341bb81a2fb3ac92795c2f1d38dbc not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.896592 4868 scope.go:117] "RemoveContainer" containerID="9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.896827 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2\": container with ID starting with 9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2 not found: ID does not exist" containerID="9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.896857 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2"} err="failed to get container status \"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2\": rpc error: code = NotFound desc = could not find container \"9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2\": container with ID starting with 9d92f9189c8e85144ddaec73177a985fa83ce004f6cd9df9bb03ce3780fef1f2 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.896880 4868 scope.go:117] "RemoveContainer" containerID="229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.897300 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb\": container with ID starting with 229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb not found: ID does not exist" containerID="229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.897397 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb"} err="failed to get container status \"229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb\": rpc error: code = NotFound desc = could not find container \"229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb\": container with ID starting with 229dc67140156b2cc4ca029a5e2a13fa67bd0de0eeca293a45d6776cf9d6bfbb not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.897450 4868 scope.go:117] "RemoveContainer" containerID="0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.907576 4868 scope.go:117] "RemoveContainer" containerID="0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.908100 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3\": container with ID starting with 0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3 not found: ID does not exist" containerID="0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.908132 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3"} err="failed to get container status \"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3\": rpc error: code = NotFound desc = could not find container \"0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3\": container with ID starting with 0e9249ee2671d7b717113a887899b4234dbe7de43fe0ca90f57fd00a070c22a3 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.908152 4868 scope.go:117] "RemoveContainer" containerID="3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.931387 4868 scope.go:117] "RemoveContainer" containerID="d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.945005 4868 scope.go:117] "RemoveContainer" containerID="79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.961198 4868 scope.go:117] "RemoveContainer" containerID="3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.961700 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf\": container with ID starting with 3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf not found: ID does not exist" containerID="3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.961754 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf"} err="failed to get container status \"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf\": rpc error: code = NotFound desc = could not find container \"3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf\": container with ID starting with 3a07fda101678b1872ba71fd2dc03110ec81c181feae887d29bae10e9f8179bf not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.961951 4868 scope.go:117] "RemoveContainer" containerID="d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.962440 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57\": container with ID starting with d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57 not found: ID does not exist" containerID="d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.962478 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57"} err="failed to get container status \"d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57\": rpc error: code = NotFound desc = could not find container \"d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57\": container with ID starting with d0fb85fd84dc404c1978d0656029ccc7dec26a403140b73d3a9b77287152bb57 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.962505 4868 scope.go:117] "RemoveContainer" containerID="79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12"
Oct 03 12:55:32 crc kubenswrapper[4868]: E1003 12:55:32.963449 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12\": container with ID starting with 79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12 not found: ID does not exist" containerID="79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.963503 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12"} err="failed to get container status \"79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12\": rpc error: code = NotFound desc = could not find container \"79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12\": container with ID starting with 79917d85a70b9aef6d67dee8e32299ed27acc74a627b580952a8a538ff787c12 not found: ID does not exist"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.963536 4868 scope.go:117] "RemoveContainer" containerID="1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.971602 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9725f594-2bef-441e-9407-8712be581aa9-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.975407 4868 scope.go:117] "RemoveContainer" containerID="efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d"
Oct 03 12:55:32 crc kubenswrapper[4868]: I1003 12:55:32.989742 4868 scope.go:117] "RemoveContainer" containerID="c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.003258 4868 scope.go:117] "RemoveContainer" containerID="1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"
Oct 03 12:55:33 crc kubenswrapper[4868]: E1003 12:55:33.003763 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535\": container with ID starting with 1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535 not found: ID does not exist" containerID="1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.003813 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535"} err="failed to get container status \"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535\": rpc error: code = NotFound desc = could not find container \"1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535\": container with ID starting with 1aceabd8283a6132974a44fce9aab80a4c11bcfb3af8e778e27ce585f7d1f535 not found: ID does not exist"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.003841 4868 scope.go:117] "RemoveContainer" containerID="efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d"
Oct 03 12:55:33 crc kubenswrapper[4868]: E1003 12:55:33.004192 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d\": container with ID starting with efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d not found: ID does not exist" containerID="efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.004221 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d"} err="failed to get container status \"efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d\": rpc error: code = NotFound desc = could not find container \"efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d\": container with ID starting with efff13f3504fa8c5ac36777fb3ff44922eac6a18bc39d1723be231aec17d9a7d not found: ID does not exist"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.004244 4868 scope.go:117] "RemoveContainer" containerID="c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"
Oct 03 12:55:33 crc kubenswrapper[4868]: E1003 12:55:33.004539 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428\": container with ID starting with c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428 not found: ID does not exist" containerID="c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.004563 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428"} err="failed to get container status \"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428\": rpc error: code = NotFound desc = could not find container \"c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428\": container with ID starting with c4f3eb1f8d1955cd3ae138eb7ab00f9f57658311476927355d9ea3fad0695428 not found: ID does not exist"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.026205 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rpjpr"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.112339 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.115443 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7d8ll"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.168916 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9gtqf"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.176037 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9gtqf"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.188289 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.197521 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-fcf5t"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.204930 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.206914 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-q8vcv"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.225270 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.241441 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-qjpnr"]
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.815669 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" event={"ID":"5f135c25-3c52-475a-9833-042496477d82","Type":"ContainerStarted","Data":"e8249f08f58863af8c1f2fcffb9ba1d5fb3b528e752dc20a8b636a0bdb337287"}
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.815734 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr" event={"ID":"5f135c25-3c52-475a-9833-042496477d82","Type":"ContainerStarted","Data":"c5f05b994e7c38843546b81b2f1016aa3a42fe0eea4ef15ab1f60fb69aa8cbb9"}
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.816240 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
Oct 03 12:55:33 crc kubenswrapper[4868]: I1003 12:55:33.818789 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rpjpr"
observedRunningTime="2025-10-03 12:55:33.832343928 +0000 UTC m=+330.042192994" watchObservedRunningTime="2025-10-03 12:55:33.834141006 +0000 UTC m=+330.043990092" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.385972 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bffqk"] Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386207 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386222 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386235 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386242 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386254 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386261 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386274 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386281 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386291 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386299 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386306 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386311 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386318 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386323 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="extract-content" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386333 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386339 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="extract-utilities" Oct 03 12:55:34 crc 
kubenswrapper[4868]: E1003 12:55:34.386348 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386353 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386362 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386367 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386374 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386379 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386386 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386392 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="extract-utilities" Oct 03 12:55:34 crc kubenswrapper[4868]: E1003 12:55:34.386400 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386405 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386484 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386495 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" containerName="marketplace-operator" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386502 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="9725f594-2bef-441e-9407-8712be581aa9" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386510 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f74dd718-f443-4005-a447-f2384a2f218d" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.386535 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" containerName="registry-server" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.387228 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.389644 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.396617 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bffqk"] Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.498797 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-catalog-content\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.498849 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-utilities\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.498875 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8mk8\" (UniqueName: \"kubernetes.io/projected/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-kube-api-access-n8mk8\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.550560 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346d964e-9d9d-4175-9828-ba55c3c31778" path="/var/lib/kubelet/pods/346d964e-9d9d-4175-9828-ba55c3c31778/volumes" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.551038 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9725f594-2bef-441e-9407-8712be581aa9" path="/var/lib/kubelet/pods/9725f594-2bef-441e-9407-8712be581aa9/volumes" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.551599 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c39a6e17-1832-4321-a2a1-35adc3dd841b" path="/var/lib/kubelet/pods/c39a6e17-1832-4321-a2a1-35adc3dd841b/volumes" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.552603 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c739901a-792b-4b7b-958d-5fcba129ff22" path="/var/lib/kubelet/pods/c739901a-792b-4b7b-958d-5fcba129ff22/volumes" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.553476 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f74dd718-f443-4005-a447-f2384a2f218d" path="/var/lib/kubelet/pods/f74dd718-f443-4005-a447-f2384a2f218d/volumes" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.593830 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r767z"] Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.595086 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.596493 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r767z"] Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.605682 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8mk8\" (UniqueName: \"kubernetes.io/projected/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-kube-api-access-n8mk8\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.605730 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.606164 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-catalog-content\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.606195 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-utilities\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.606586 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-utilities\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.607153 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-catalog-content\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.629724 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8mk8\" (UniqueName: \"kubernetes.io/projected/bc3e1be6-720c-4877-ab1f-a889f6eeb9fa-kube-api-access-n8mk8\") pod \"redhat-marketplace-bffqk\" (UID: \"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa\") " pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.706894 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jncjv\" (UniqueName: \"kubernetes.io/projected/c99cb29f-a3c3-4085-a7ea-596646e293f1-kube-api-access-jncjv\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.707024 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-catalog-content\") pod \"redhat-operators-r767z\" (UID: 
\"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.707259 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-utilities\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.712620 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.808449 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-utilities\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.808508 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jncjv\" (UniqueName: \"kubernetes.io/projected/c99cb29f-a3c3-4085-a7ea-596646e293f1-kube-api-access-jncjv\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.808536 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-catalog-content\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.809101 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-catalog-content\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.809117 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c99cb29f-a3c3-4085-a7ea-596646e293f1-utilities\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.828011 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jncjv\" (UniqueName: \"kubernetes.io/projected/c99cb29f-a3c3-4085-a7ea-596646e293f1-kube-api-access-jncjv\") pod \"redhat-operators-r767z\" (UID: \"c99cb29f-a3c3-4085-a7ea-596646e293f1\") " pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.900907 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bffqk"] Oct 03 12:55:34 crc kubenswrapper[4868]: W1003 12:55:34.905884 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbc3e1be6_720c_4877_ab1f_a889f6eeb9fa.slice/crio-05a3c6f16a7ab0b3cad1174209f80a8aca3d1cafba1e2f031bf723ded3dea2a8 WatchSource:0}: Error finding container 
05a3c6f16a7ab0b3cad1174209f80a8aca3d1cafba1e2f031bf723ded3dea2a8: Status 404 returned error can't find the container with id 05a3c6f16a7ab0b3cad1174209f80a8aca3d1cafba1e2f031bf723ded3dea2a8 Oct 03 12:55:34 crc kubenswrapper[4868]: I1003 12:55:34.931275 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.109321 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r767z"] Oct 03 12:55:35 crc kubenswrapper[4868]: W1003 12:55:35.131461 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc99cb29f_a3c3_4085_a7ea_596646e293f1.slice/crio-80b58ba6fa3a37ae0cf1eaa86bc715ec96a93b6d8d6466bcfadb465bfc8cdb93 WatchSource:0}: Error finding container 80b58ba6fa3a37ae0cf1eaa86bc715ec96a93b6d8d6466bcfadb465bfc8cdb93: Status 404 returned error can't find the container with id 80b58ba6fa3a37ae0cf1eaa86bc715ec96a93b6d8d6466bcfadb465bfc8cdb93 Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.828142 4868 generic.go:334] "Generic (PLEG): container finished" podID="c99cb29f-a3c3-4085-a7ea-596646e293f1" containerID="6b79391f14f05cf85bd65c939d593d8ba48a2129a2dd99b723378891e79df8e2" exitCode=0 Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.828234 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r767z" event={"ID":"c99cb29f-a3c3-4085-a7ea-596646e293f1","Type":"ContainerDied","Data":"6b79391f14f05cf85bd65c939d593d8ba48a2129a2dd99b723378891e79df8e2"} Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.828515 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r767z" event={"ID":"c99cb29f-a3c3-4085-a7ea-596646e293f1","Type":"ContainerStarted","Data":"80b58ba6fa3a37ae0cf1eaa86bc715ec96a93b6d8d6466bcfadb465bfc8cdb93"} Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.830381 4868 generic.go:334] "Generic (PLEG): container finished" podID="bc3e1be6-720c-4877-ab1f-a889f6eeb9fa" containerID="a1b8c015cd2722c30ab07bd5a2298e96b995c5b881dcef07360f6cff1e116f20" exitCode=0 Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.830443 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bffqk" event={"ID":"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa","Type":"ContainerDied","Data":"a1b8c015cd2722c30ab07bd5a2298e96b995c5b881dcef07360f6cff1e116f20"} Oct 03 12:55:35 crc kubenswrapper[4868]: I1003 12:55:35.830528 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bffqk" event={"ID":"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa","Type":"ContainerStarted","Data":"05a3c6f16a7ab0b3cad1174209f80a8aca3d1cafba1e2f031bf723ded3dea2a8"} Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.784814 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.786208 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.789901 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.796880 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.832077 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhpz8\" (UniqueName: \"kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.832189 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.832247 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.933108 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhpz8\" (UniqueName: \"kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.933159 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.933178 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.933736 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content\") pod \"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.935043 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities\") pod \"certified-operators-bx6ln\" (UID: 
\"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.987482 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-5kjld"] Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.988888 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.993490 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Oct 03 12:55:36 crc kubenswrapper[4868]: I1003 12:55:36.997817 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5kjld"] Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.034840 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-utilities\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.034901 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-catalog-content\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.035004 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58ffb\" (UniqueName: \"kubernetes.io/projected/feb39d54-b39f-4b3e-b010-02e87203341a-kube-api-access-58ffb\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.136466 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58ffb\" (UniqueName: \"kubernetes.io/projected/feb39d54-b39f-4b3e-b010-02e87203341a-kube-api-access-58ffb\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.136568 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-utilities\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.136609 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-catalog-content\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.261120 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhpz8\" (UniqueName: \"kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8\") pod 
\"certified-operators-bx6ln\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.289980 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58ffb\" (UniqueName: \"kubernetes.io/projected/feb39d54-b39f-4b3e-b010-02e87203341a-kube-api-access-58ffb\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.323075 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-utilities\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.323131 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/feb39d54-b39f-4b3e-b010-02e87203341a-catalog-content\") pod \"community-operators-5kjld\" (UID: \"feb39d54-b39f-4b3e-b010-02e87203341a\") " pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.410517 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.561313 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.764704 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-5kjld"] Oct 03 12:55:37 crc kubenswrapper[4868]: W1003 12:55:37.778847 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfeb39d54_b39f_4b3e_b010_02e87203341a.slice/crio-2ccdda26c373ce76d6632cbc078a5d0b782ff777406858f68acb20254135bd76 WatchSource:0}: Error finding container 2ccdda26c373ce76d6632cbc078a5d0b782ff777406858f68acb20254135bd76: Status 404 returned error can't find the container with id 2ccdda26c373ce76d6632cbc078a5d0b782ff777406858f68acb20254135bd76 Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.800702 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 12:55:37 crc kubenswrapper[4868]: W1003 12:55:37.816368 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod055b5bf7_16bd_4acb_8d96_a5678f86f0c2.slice/crio-534906dfc9579a9527cdeef2e93dea57974c14eb2fc44152b7d0e73b3554e77e WatchSource:0}: Error finding container 534906dfc9579a9527cdeef2e93dea57974c14eb2fc44152b7d0e73b3554e77e: Status 404 returned error can't find the container with id 534906dfc9579a9527cdeef2e93dea57974c14eb2fc44152b7d0e73b3554e77e Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.857434 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kjld" event={"ID":"feb39d54-b39f-4b3e-b010-02e87203341a","Type":"ContainerStarted","Data":"2ccdda26c373ce76d6632cbc078a5d0b782ff777406858f68acb20254135bd76"} Oct 03 12:55:37 crc kubenswrapper[4868]: I1003 12:55:37.860290 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerStarted","Data":"534906dfc9579a9527cdeef2e93dea57974c14eb2fc44152b7d0e73b3554e77e"} Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.867674 4868 generic.go:334] "Generic (PLEG): container finished" podID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerID="20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542" exitCode=0 Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.867810 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerDied","Data":"20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542"} Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.871673 4868 generic.go:334] "Generic (PLEG): container finished" podID="c99cb29f-a3c3-4085-a7ea-596646e293f1" containerID="dae5d010d19760e14f30db79302113d4ae3dae8ce167f2ef946014a14ae770c5" exitCode=0 Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.871738 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r767z" event={"ID":"c99cb29f-a3c3-4085-a7ea-596646e293f1","Type":"ContainerDied","Data":"dae5d010d19760e14f30db79302113d4ae3dae8ce167f2ef946014a14ae770c5"} Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.874167 4868 generic.go:334] "Generic (PLEG): container finished" podID="bc3e1be6-720c-4877-ab1f-a889f6eeb9fa" containerID="5b6ddbf572cfc168eb2c0b527dacfc272a80bd2d8911fa87d9127497aeb90b88" exitCode=0 Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.874224 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bffqk" event={"ID":"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa","Type":"ContainerDied","Data":"5b6ddbf572cfc168eb2c0b527dacfc272a80bd2d8911fa87d9127497aeb90b88"} Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.876462 4868 generic.go:334] "Generic (PLEG): container finished" podID="feb39d54-b39f-4b3e-b010-02e87203341a" containerID="0d4df7d9150bc945af7e82b13d0bd3cbc521bc0343cdc5091bf96c34d4f17a66" exitCode=0 Oct 03 12:55:38 crc kubenswrapper[4868]: I1003 12:55:38.876488 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kjld" event={"ID":"feb39d54-b39f-4b3e-b010-02e87203341a","Type":"ContainerDied","Data":"0d4df7d9150bc945af7e82b13d0bd3cbc521bc0343cdc5091bf96c34d4f17a66"} Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.897486 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bffqk" event={"ID":"bc3e1be6-720c-4877-ab1f-a889f6eeb9fa","Type":"ContainerStarted","Data":"95ac840463b3e8333c548da5e12e96aec374cabfefec0b24ef19ae45e3742d9b"} Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.899277 4868 generic.go:334] "Generic (PLEG): container finished" podID="feb39d54-b39f-4b3e-b010-02e87203341a" containerID="0205e59fc520cc11a1850a6aff65932bbc3b8aa6141ad17823ea1e33673b9ed1" exitCode=0 Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.899364 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kjld" event={"ID":"feb39d54-b39f-4b3e-b010-02e87203341a","Type":"ContainerDied","Data":"0205e59fc520cc11a1850a6aff65932bbc3b8aa6141ad17823ea1e33673b9ed1"} Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.900921 4868 generic.go:334] "Generic (PLEG): container finished" 
podID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerID="49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306" exitCode=0 Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.900968 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerDied","Data":"49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306"} Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.905198 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r767z" event={"ID":"c99cb29f-a3c3-4085-a7ea-596646e293f1","Type":"ContainerStarted","Data":"594f8951c24156e0af589d8fa8a97fbdae2eb9a4cc492dd6ab2877bb414403d1"} Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.921117 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bffqk" podStartSLOduration=3.77697848 podStartE2EDuration="7.92110044s" podCreationTimestamp="2025-10-03 12:55:34 +0000 UTC" firstStartedPulling="2025-10-03 12:55:35.831469307 +0000 UTC m=+332.041318373" lastFinishedPulling="2025-10-03 12:55:39.975591267 +0000 UTC m=+336.185440333" observedRunningTime="2025-10-03 12:55:41.918688961 +0000 UTC m=+338.128538047" watchObservedRunningTime="2025-10-03 12:55:41.92110044 +0000 UTC m=+338.130949506" Oct 03 12:55:41 crc kubenswrapper[4868]: I1003 12:55:41.991777 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r767z" podStartSLOduration=3.923538674 podStartE2EDuration="7.991751151s" podCreationTimestamp="2025-10-03 12:55:34 +0000 UTC" firstStartedPulling="2025-10-03 12:55:35.829476842 +0000 UTC m=+332.039325908" lastFinishedPulling="2025-10-03 12:55:39.897689319 +0000 UTC m=+336.107538385" observedRunningTime="2025-10-03 12:55:41.989644883 +0000 UTC m=+338.199493969" watchObservedRunningTime="2025-10-03 12:55:41.991751151 +0000 UTC m=+338.201600217" Oct 03 12:55:42 crc kubenswrapper[4868]: I1003 12:55:42.911969 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-5kjld" event={"ID":"feb39d54-b39f-4b3e-b010-02e87203341a","Type":"ContainerStarted","Data":"ff70cb45cef05b4060d3bc6e8845055eae7f36523b45f205fad57e9a2fdd81ac"} Oct 03 12:55:42 crc kubenswrapper[4868]: I1003 12:55:42.914713 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerStarted","Data":"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4"} Oct 03 12:55:42 crc kubenswrapper[4868]: I1003 12:55:42.933793 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-5kjld" podStartSLOduration=3.50195728 podStartE2EDuration="6.933775037s" podCreationTimestamp="2025-10-03 12:55:36 +0000 UTC" firstStartedPulling="2025-10-03 12:55:38.879280926 +0000 UTC m=+335.089129992" lastFinishedPulling="2025-10-03 12:55:42.311098683 +0000 UTC m=+338.520947749" observedRunningTime="2025-10-03 12:55:42.93203199 +0000 UTC m=+339.141881056" watchObservedRunningTime="2025-10-03 12:55:42.933775037 +0000 UTC m=+339.143624103" Oct 03 12:55:42 crc kubenswrapper[4868]: I1003 12:55:42.948567 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bx6ln" podStartSLOduration=3.4454519279999998 
podStartE2EDuration="6.948552617s" podCreationTimestamp="2025-10-03 12:55:36 +0000 UTC" firstStartedPulling="2025-10-03 12:55:38.869203898 +0000 UTC m=+335.079052954" lastFinishedPulling="2025-10-03 12:55:42.372304577 +0000 UTC m=+338.582153643" observedRunningTime="2025-10-03 12:55:42.945820608 +0000 UTC m=+339.155669674" watchObservedRunningTime="2025-10-03 12:55:42.948552617 +0000 UTC m=+339.158401683" Oct 03 12:55:44 crc kubenswrapper[4868]: I1003 12:55:44.712762 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:44 crc kubenswrapper[4868]: I1003 12:55:44.714379 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:44 crc kubenswrapper[4868]: I1003 12:55:44.756041 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:44 crc kubenswrapper[4868]: I1003 12:55:44.931471 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:44 crc kubenswrapper[4868]: I1003 12:55:44.931566 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:45 crc kubenswrapper[4868]: I1003 12:55:45.981125 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r767z" podUID="c99cb29f-a3c3-4085-a7ea-596646e293f1" containerName="registry-server" probeResult="failure" output=< Oct 03 12:55:45 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 12:55:45 crc kubenswrapper[4868]: > Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.411042 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.411359 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.447200 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.562700 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.563070 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.604043 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.978325 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-5kjld" Oct 03 12:55:47 crc kubenswrapper[4868]: I1003 12:55:47.980835 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 12:55:54 crc kubenswrapper[4868]: I1003 12:55:54.755263 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bffqk" Oct 03 12:55:54 crc kubenswrapper[4868]: I1003 12:55:54.968549 4868 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:55:55 crc kubenswrapper[4868]: I1003 12:55:55.015660 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r767z" Oct 03 12:56:02 crc kubenswrapper[4868]: I1003 12:56:02.145134 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:56:02 crc kubenswrapper[4868]: I1003 12:56:02.145784 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:56:32 crc kubenswrapper[4868]: I1003 12:56:32.145976 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:56:32 crc kubenswrapper[4868]: I1003 12:56:32.146560 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.145534 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.146063 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.146121 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.146728 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.146793 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" 
containerID="cri-o://e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5" gracePeriod=600 Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.309452 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5" exitCode=0 Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.309512 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5"} Oct 03 12:57:02 crc kubenswrapper[4868]: I1003 12:57:02.309558 4868 scope.go:117] "RemoveContainer" containerID="bd2017acf7931d1eb42be2735a519a8a545378d0ea2193d285207f296e296eea" Oct 03 12:57:03 crc kubenswrapper[4868]: I1003 12:57:03.319459 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a"} Oct 03 12:57:41 crc kubenswrapper[4868]: I1003 12:57:41.926843 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sqhjq"] Oct 03 12:57:41 crc kubenswrapper[4868]: I1003 12:57:41.928231 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:41 crc kubenswrapper[4868]: I1003 12:57:41.939558 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sqhjq"] Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.050744 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-certificates\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.050819 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwvtw\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-kube-api-access-mwvtw\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.050855 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.050878 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-bound-sa-token\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 
12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.050937 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-tls\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.051153 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4e607c49-cd10-4ae8-93c3-212a8ca09330-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.051226 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4e607c49-cd10-4ae8-93c3-212a8ca09330-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.051297 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-trusted-ca\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.075538 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152401 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-tls\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152469 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4e607c49-cd10-4ae8-93c3-212a8ca09330-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152489 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4e607c49-cd10-4ae8-93c3-212a8ca09330-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152508 4868 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-trusted-ca\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152534 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-certificates\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152561 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwvtw\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-kube-api-access-mwvtw\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.152586 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-bound-sa-token\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.153491 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/4e607c49-cd10-4ae8-93c3-212a8ca09330-ca-trust-extracted\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.154017 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-trusted-ca\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.154102 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-certificates\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.158418 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/4e607c49-cd10-4ae8-93c3-212a8ca09330-installation-pull-secrets\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.158890 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-registry-tls\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.168412 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-bound-sa-token\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.170011 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwvtw\" (UniqueName: \"kubernetes.io/projected/4e607c49-cd10-4ae8-93c3-212a8ca09330-kube-api-access-mwvtw\") pod \"image-registry-66df7c8f76-sqhjq\" (UID: \"4e607c49-cd10-4ae8-93c3-212a8ca09330\") " pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.247393 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.436691 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-sqhjq"] Oct 03 12:57:42 crc kubenswrapper[4868]: I1003 12:57:42.566927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" event={"ID":"4e607c49-cd10-4ae8-93c3-212a8ca09330","Type":"ContainerStarted","Data":"70ac99b9216f4455c443295eceaa700c23b2b060bdb2333fc422a5395e84fc69"} Oct 03 12:57:43 crc kubenswrapper[4868]: I1003 12:57:43.574309 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" event={"ID":"4e607c49-cd10-4ae8-93c3-212a8ca09330","Type":"ContainerStarted","Data":"e2162de732d43bca129c82fa0074b44ad341380659b9b3097e093b2a34fa9b47"} Oct 03 12:57:43 crc kubenswrapper[4868]: I1003 12:57:43.575882 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:57:43 crc kubenswrapper[4868]: I1003 12:57:43.593397 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" podStartSLOduration=2.593383004 podStartE2EDuration="2.593383004s" podCreationTimestamp="2025-10-03 12:57:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 12:57:43.593248961 +0000 UTC m=+459.803098057" watchObservedRunningTime="2025-10-03 12:57:43.593383004 +0000 UTC m=+459.803232070" Oct 03 12:58:02 crc kubenswrapper[4868]: I1003 12:58:02.253786 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-sqhjq" Oct 03 12:58:02 crc kubenswrapper[4868]: I1003 12:58:02.297181 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.338348 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" podUID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" containerName="registry" containerID="cri-o://1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278" gracePeriod=30 Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.646619 4868 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673400 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwrg7\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673462 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673486 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673661 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673678 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673714 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673757 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.673776 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca\") pod \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\" (UID: \"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1\") " Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.675513 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.676226 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.684171 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.685299 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.685742 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.686023 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7" (OuterVolumeSpecName: "kube-api-access-zwrg7") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "kube-api-access-zwrg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.694195 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.694603 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" (UID: "fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775124 4868 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775169 4868 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-bound-sa-token\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775186 4868 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-tls\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775200 4868 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775212 4868 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-registry-certificates\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775221 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-trusted-ca\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.775233 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwrg7\" (UniqueName: \"kubernetes.io/projected/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1-kube-api-access-zwrg7\") on node \"crc\" DevicePath \"\"" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.819177 4868 generic.go:334] "Generic (PLEG): container finished" podID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" containerID="1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278" exitCode=0 Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.819222 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" event={"ID":"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1","Type":"ContainerDied","Data":"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278"} Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.819251 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" event={"ID":"fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1","Type":"ContainerDied","Data":"e035f32efd03e9dfeb8d3cec00c38f4b3bb32e9a79ba515521d6bbbd26c95e4c"} Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.819269 4868 scope.go:117] "RemoveContainer" containerID="1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.819337 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-4cxr5" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.835690 4868 scope.go:117] "RemoveContainer" containerID="1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278" Oct 03 12:58:27 crc kubenswrapper[4868]: E1003 12:58:27.836217 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278\": container with ID starting with 1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278 not found: ID does not exist" containerID="1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.836259 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278"} err="failed to get container status \"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278\": rpc error: code = NotFound desc = could not find container \"1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278\": container with ID starting with 1eb02e934cdc49d9cb02ddc2247e1b4dcf16c718087d63d5a09e00822883b278 not found: ID does not exist" Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.849747 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:58:27 crc kubenswrapper[4868]: I1003 12:58:27.852729 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-4cxr5"] Oct 03 12:58:28 crc kubenswrapper[4868]: I1003 12:58:28.551116 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" path="/var/lib/kubelet/pods/fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1/volumes" Oct 03 12:59:02 crc kubenswrapper[4868]: I1003 12:59:02.145670 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:59:02 crc kubenswrapper[4868]: I1003 12:59:02.146419 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 12:59:04 crc kubenswrapper[4868]: I1003 12:59:04.653479 4868 scope.go:117] "RemoveContainer" containerID="917b82c8a6c04a95201b483fc33783897e2c907ca94621f6f1c7edf333708b37" Oct 03 12:59:32 crc kubenswrapper[4868]: I1003 12:59:32.145580 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 12:59:32 crc kubenswrapper[4868]: I1003 12:59:32.146562 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.138327 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5"] Oct 03 13:00:00 crc kubenswrapper[4868]: E1003 13:00:00.139119 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" containerName="registry" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.139134 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" containerName="registry" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.139253 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="fe20bc6b-533e-46ee-bff3-e8a7a8a5ddd1" containerName="registry" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.142567 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.145543 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.145914 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5"] Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.148042 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.311780 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzhmh\" (UniqueName: \"kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.312176 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.312204 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.413045 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzhmh\" (UniqueName: \"kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.413132 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.413163 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.414389 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.427234 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.433218 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzhmh\" (UniqueName: \"kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh\") pod \"collect-profiles-29324940-nj5j5\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.459876 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:00 crc kubenswrapper[4868]: I1003 13:00:00.664226 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5"] Oct 03 13:00:01 crc kubenswrapper[4868]: I1003 13:00:01.338470 4868 generic.go:334] "Generic (PLEG): container finished" podID="e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" containerID="e71b335f0fb53e1fddffb2fe0a85d3e50c4ce42936b5ef0bd0c9532f0ec21053" exitCode=0 Oct 03 13:00:01 crc kubenswrapper[4868]: I1003 13:00:01.338571 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" event={"ID":"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428","Type":"ContainerDied","Data":"e71b335f0fb53e1fddffb2fe0a85d3e50c4ce42936b5ef0bd0c9532f0ec21053"} Oct 03 13:00:01 crc kubenswrapper[4868]: I1003 13:00:01.338652 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" event={"ID":"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428","Type":"ContainerStarted","Data":"a96ec1672637bf0829441d79303c0fc4d1e4709cf18a0582be81019e30e9fb59"} Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.146047 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.146714 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.146790 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.147495 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.147587 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a" gracePeriod=600 Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.348006 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a" exitCode=0 Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.348319 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" 
event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a"} Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.348374 4868 scope.go:117] "RemoveContainer" containerID="e93acf18d8e635e25e39f3282746d01e8bec7978d70317a883aafe7973414ea5" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.575429 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.748443 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume\") pod \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.748616 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzhmh\" (UniqueName: \"kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh\") pod \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.748707 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume\") pod \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\" (UID: \"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428\") " Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.749846 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume" (OuterVolumeSpecName: "config-volume") pod "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" (UID: "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.759323 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" (UID: "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.759366 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh" (OuterVolumeSpecName: "kube-api-access-pzhmh") pod "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" (UID: "e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428"). InnerVolumeSpecName "kube-api-access-pzhmh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.850615 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzhmh\" (UniqueName: \"kubernetes.io/projected/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-kube-api-access-pzhmh\") on node \"crc\" DevicePath \"\"" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.851222 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:00:02 crc kubenswrapper[4868]: I1003 13:00:02.851237 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:00:03 crc kubenswrapper[4868]: I1003 13:00:03.355199 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" event={"ID":"e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428","Type":"ContainerDied","Data":"a96ec1672637bf0829441d79303c0fc4d1e4709cf18a0582be81019e30e9fb59"} Oct 03 13:00:03 crc kubenswrapper[4868]: I1003 13:00:03.355260 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a96ec1672637bf0829441d79303c0fc4d1e4709cf18a0582be81019e30e9fb59" Oct 03 13:00:03 crc kubenswrapper[4868]: I1003 13:00:03.355218 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5" Oct 03 13:00:03 crc kubenswrapper[4868]: I1003 13:00:03.357827 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13"} Oct 03 13:00:04 crc kubenswrapper[4868]: I1003 13:00:04.698397 4868 scope.go:117] "RemoveContainer" containerID="9b9eacbe36b451a033e7fe810ad0afaa506f13b52f88569261cf6117f69e289d" Oct 03 13:00:04 crc kubenswrapper[4868]: I1003 13:00:04.718144 4868 scope.go:117] "RemoveContainer" containerID="dde0aff41931995e9dd929375ff344d3d102d9315d249cb59f339e02419ed5f3" Oct 03 13:02:02 crc kubenswrapper[4868]: I1003 13:02:02.145155 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:02:02 crc kubenswrapper[4868]: I1003 13:02:02.145811 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:02:32 crc kubenswrapper[4868]: I1003 13:02:32.145423 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:02:32 crc kubenswrapper[4868]: I1003 13:02:32.146280 4868 prober.go:107] "Probe failed" 
probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:02:40 crc kubenswrapper[4868]: I1003 13:02:40.649894 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"] Oct 03 13:02:40 crc kubenswrapper[4868]: I1003 13:02:40.650631 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerName="controller-manager" containerID="cri-o://90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108" gracePeriod=30 Oct 03 13:02:40 crc kubenswrapper[4868]: I1003 13:02:40.754737 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"] Oct 03 13:02:40 crc kubenswrapper[4868]: I1003 13:02:40.754984 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerName="route-controller-manager" containerID="cri-o://566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7" gracePeriod=30 Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.156424 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.197846 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.261511 4868 generic.go:334] "Generic (PLEG): container finished" podID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerID="90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108" exitCode=0 Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.261631 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.262206 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" event={"ID":"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf","Type":"ContainerDied","Data":"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108"} Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.262247 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-6fp9h" event={"ID":"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf","Type":"ContainerDied","Data":"baf097e1ecc3f345daa32087d805c113ea44ecb8694dace0970a07ca8380b65b"} Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.262266 4868 scope.go:117] "RemoveContainer" containerID="90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.263865 4868 generic.go:334] "Generic (PLEG): container finished" podID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerID="566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7" exitCode=0 Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.263892 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" event={"ID":"df67ce79-06d5-4d3f-a54e-b77cad420085","Type":"ContainerDied","Data":"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7"} Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.263907 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" event={"ID":"df67ce79-06d5-4d3f-a54e-b77cad420085","Type":"ContainerDied","Data":"36f693b4d06422ad00ef7dad6ea15079ea5364267da01f927d8aa86dda55ecbd"} Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.263939 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.283002 4868 scope.go:117] "RemoveContainer" containerID="90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108" Oct 03 13:02:41 crc kubenswrapper[4868]: E1003 13:02:41.283377 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108\": container with ID starting with 90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108 not found: ID does not exist" containerID="90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.283406 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108"} err="failed to get container status \"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108\": rpc error: code = NotFound desc = could not find container \"90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108\": container with ID starting with 90db93d1049c05e658db848abfb96ed6a0e7ba12866bd358ce89c554b6286108 not found: ID does not exist" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.283428 4868 scope.go:117] "RemoveContainer" containerID="566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.290370 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config\") pod \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.290438 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca\") pod \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.290489 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggjt9\" (UniqueName: \"kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9\") pod \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.290543 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert\") pod \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.290579 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles\") pod \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\" (UID: \"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.292487 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod 
"240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" (UID: "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.292721 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca" (OuterVolumeSpecName: "client-ca") pod "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" (UID: "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.293356 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config" (OuterVolumeSpecName: "config") pod "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" (UID: "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.298172 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" (UID: "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.298875 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9" (OuterVolumeSpecName: "kube-api-access-ggjt9") pod "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" (UID: "240cf9f9-30a6-46ed-bde7-02cdd3fe51cf"). InnerVolumeSpecName "kube-api-access-ggjt9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.301867 4868 scope.go:117] "RemoveContainer" containerID="566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7" Oct 03 13:02:41 crc kubenswrapper[4868]: E1003 13:02:41.302432 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7\": container with ID starting with 566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7 not found: ID does not exist" containerID="566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.302525 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7"} err="failed to get container status \"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7\": rpc error: code = NotFound desc = could not find container \"566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7\": container with ID starting with 566206e572e7f07f94106d2349399ecb184b772e12d1fe295eb3650f9f7026e7 not found: ID does not exist" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.391790 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kh78\" (UniqueName: \"kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78\") pod \"df67ce79-06d5-4d3f-a54e-b77cad420085\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.391855 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca\") pod \"df67ce79-06d5-4d3f-a54e-b77cad420085\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.391944 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config\") pod \"df67ce79-06d5-4d3f-a54e-b77cad420085\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392014 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert\") pod \"df67ce79-06d5-4d3f-a54e-b77cad420085\" (UID: \"df67ce79-06d5-4d3f-a54e-b77cad420085\") " Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392367 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392392 4868 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392405 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggjt9\" (UniqueName: \"kubernetes.io/projected/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-kube-api-access-ggjt9\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392419 4868 
reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392432 4868 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.392955 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca" (OuterVolumeSpecName: "client-ca") pod "df67ce79-06d5-4d3f-a54e-b77cad420085" (UID: "df67ce79-06d5-4d3f-a54e-b77cad420085"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.393102 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config" (OuterVolumeSpecName: "config") pod "df67ce79-06d5-4d3f-a54e-b77cad420085" (UID: "df67ce79-06d5-4d3f-a54e-b77cad420085"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.396263 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "df67ce79-06d5-4d3f-a54e-b77cad420085" (UID: "df67ce79-06d5-4d3f-a54e-b77cad420085"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.398549 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78" (OuterVolumeSpecName: "kube-api-access-7kh78") pod "df67ce79-06d5-4d3f-a54e-b77cad420085" (UID: "df67ce79-06d5-4d3f-a54e-b77cad420085"). InnerVolumeSpecName "kube-api-access-7kh78". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.494166 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.494223 4868 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/df67ce79-06d5-4d3f-a54e-b77cad420085-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.494241 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kh78\" (UniqueName: \"kubernetes.io/projected/df67ce79-06d5-4d3f-a54e-b77cad420085-kube-api-access-7kh78\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.494252 4868 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/df67ce79-06d5-4d3f-a54e-b77cad420085-client-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.594464 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"] Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.605370 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-rql9x"] Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.610794 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"] Oct 03 13:02:41 crc kubenswrapper[4868]: I1003 13:02:41.622194 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-6fp9h"] Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.361127 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-ccc49787b-2mhc7"] Oct 03 13:02:42 crc kubenswrapper[4868]: E1003 13:02:42.361933 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" containerName="collect-profiles" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.361952 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" containerName="collect-profiles" Oct 03 13:02:42 crc kubenswrapper[4868]: E1003 13:02:42.361967 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerName="controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.361974 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerName="controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: E1003 13:02:42.361998 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerName="route-controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.362008 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerName="route-controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.362128 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" containerName="collect-profiles" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 
13:02:42.362140 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" containerName="route-controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.362155 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" containerName="controller-manager" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.362671 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.364313 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk"] Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.364313 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.364417 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.364770 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.365108 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.365255 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.367305 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.367651 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.367668 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.368101 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.368388 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.368750 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.368766 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.368840 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.376167 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.378287 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-ccc49787b-2mhc7"] Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.380709 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk"] Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408265 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-serving-cert\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408309 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-proxy-ca-bundles\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408487 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-client-ca\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408614 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-config\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408668 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cnlz\" (UniqueName: \"kubernetes.io/projected/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-kube-api-access-2cnlz\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408689 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-client-ca\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408743 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-config\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.408764 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-78cjj\" (UniqueName: 
\"kubernetes.io/projected/f3480e61-44e7-4b06-8454-011e917cbd0b-kube-api-access-78cjj\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.409022 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3480e61-44e7-4b06-8454-011e917cbd0b-serving-cert\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.509984 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cnlz\" (UniqueName: \"kubernetes.io/projected/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-kube-api-access-2cnlz\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510033 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-client-ca\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510086 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-config\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510108 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-78cjj\" (UniqueName: \"kubernetes.io/projected/f3480e61-44e7-4b06-8454-011e917cbd0b-kube-api-access-78cjj\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510145 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3480e61-44e7-4b06-8454-011e917cbd0b-serving-cert\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510176 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-serving-cert\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510197 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-proxy-ca-bundles\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510232 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-client-ca\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.510265 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-config\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.512976 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-config\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.513005 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-config\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.514299 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-proxy-ca-bundles\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.515191 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f3480e61-44e7-4b06-8454-011e917cbd0b-client-ca\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.525910 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-client-ca\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.525945 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-serving-cert\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc 
kubenswrapper[4868]: I1003 13:02:42.540425 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f3480e61-44e7-4b06-8454-011e917cbd0b-serving-cert\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.541812 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cnlz\" (UniqueName: \"kubernetes.io/projected/2ab0a855-a510-4cc4-85b6-c50a96a1ce85-kube-api-access-2cnlz\") pod \"controller-manager-ccc49787b-2mhc7\" (UID: \"2ab0a855-a510-4cc4-85b6-c50a96a1ce85\") " pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.547752 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-78cjj\" (UniqueName: \"kubernetes.io/projected/f3480e61-44e7-4b06-8454-011e917cbd0b-kube-api-access-78cjj\") pod \"route-controller-manager-7f76c6bb97-q7brk\" (UID: \"f3480e61-44e7-4b06-8454-011e917cbd0b\") " pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.558830 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="240cf9f9-30a6-46ed-bde7-02cdd3fe51cf" path="/var/lib/kubelet/pods/240cf9f9-30a6-46ed-bde7-02cdd3fe51cf/volumes" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.561113 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df67ce79-06d5-4d3f-a54e-b77cad420085" path="/var/lib/kubelet/pods/df67ce79-06d5-4d3f-a54e-b77cad420085/volumes" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.687537 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.696546 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.936004 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk"] Oct 03 13:02:42 crc kubenswrapper[4868]: I1003 13:02:42.975044 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-ccc49787b-2mhc7"] Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.284323 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" event={"ID":"2ab0a855-a510-4cc4-85b6-c50a96a1ce85","Type":"ContainerStarted","Data":"35cf390e717a781a5f771072d1fe1542a4d30a333d81fc0537dd60c11eb88708"} Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.284710 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.284724 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" event={"ID":"2ab0a855-a510-4cc4-85b6-c50a96a1ce85","Type":"ContainerStarted","Data":"71339d483e9d265edfeec03b33b684811806abd02847b0d201d793840aa3ae2e"} Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.286429 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" event={"ID":"f3480e61-44e7-4b06-8454-011e917cbd0b","Type":"ContainerStarted","Data":"322a984494ac5a13d7425d1e09fe578b8219e5c0bfc9d8d3facd83728282c4a7"} Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.286506 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" event={"ID":"f3480e61-44e7-4b06-8454-011e917cbd0b","Type":"ContainerStarted","Data":"e73cc0889c2b80350123f50b102b80735b88ce11df8c6f3513e1c060615bc133"} Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.286659 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.290625 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.305982 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-ccc49787b-2mhc7" podStartSLOduration=3.305957258 podStartE2EDuration="3.305957258s" podCreationTimestamp="2025-10-03 13:02:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:02:43.305330573 +0000 UTC m=+759.515179639" watchObservedRunningTime="2025-10-03 13:02:43.305957258 +0000 UTC m=+759.515806314" Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.355663 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" podStartSLOduration=3.355630411 podStartE2EDuration="3.355630411s" podCreationTimestamp="2025-10-03 13:02:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-03 13:02:43.35557828 +0000 UTC m=+759.565427346" watchObservedRunningTime="2025-10-03 13:02:43.355630411 +0000 UTC m=+759.565479477" Oct 03 13:02:43 crc kubenswrapper[4868]: I1003 13:02:43.625980 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f76c6bb97-q7brk" Oct 03 13:02:47 crc kubenswrapper[4868]: I1003 13:02:47.786377 4868 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.158345 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-hl8ss"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.161807 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.165968 4868 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-g26g7" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.166110 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.166391 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.174869 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-hl8ss"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.189618 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8l8xn"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.190401 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.193884 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-24d25"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.194228 4868 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-lnltd" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.194940 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-24d25" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.205727 4868 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-p9sg6" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.209701 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8l8xn"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.229046 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-24d25"] Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.278599 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v2psn\" (UniqueName: \"kubernetes.io/projected/879cd6ef-837a-4622-b311-ba67498835f7-kube-api-access-v2psn\") pod \"cert-manager-5b446d88c5-24d25\" (UID: \"879cd6ef-837a-4622-b311-ba67498835f7\") " pod="cert-manager/cert-manager-5b446d88c5-24d25" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.278660 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww5wh\" (UniqueName: \"kubernetes.io/projected/9a8014e5-4176-4f00-bd52-6cc60d8995b0-kube-api-access-ww5wh\") pod \"cert-manager-webhook-5655c58dd6-8l8xn\" (UID: \"9a8014e5-4176-4f00-bd52-6cc60d8995b0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.278809 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff45c\" (UniqueName: \"kubernetes.io/projected/46746d8f-da2f-4a50-b092-76ac67fa11cb-kube-api-access-ff45c\") pod \"cert-manager-cainjector-7f985d654d-hl8ss\" (UID: \"46746d8f-da2f-4a50-b092-76ac67fa11cb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.379939 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v2psn\" (UniqueName: \"kubernetes.io/projected/879cd6ef-837a-4622-b311-ba67498835f7-kube-api-access-v2psn\") pod \"cert-manager-5b446d88c5-24d25\" (UID: \"879cd6ef-837a-4622-b311-ba67498835f7\") " pod="cert-manager/cert-manager-5b446d88c5-24d25" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.380161 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww5wh\" (UniqueName: \"kubernetes.io/projected/9a8014e5-4176-4f00-bd52-6cc60d8995b0-kube-api-access-ww5wh\") pod \"cert-manager-webhook-5655c58dd6-8l8xn\" (UID: \"9a8014e5-4176-4f00-bd52-6cc60d8995b0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.380422 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff45c\" (UniqueName: \"kubernetes.io/projected/46746d8f-da2f-4a50-b092-76ac67fa11cb-kube-api-access-ff45c\") pod \"cert-manager-cainjector-7f985d654d-hl8ss\" (UID: \"46746d8f-da2f-4a50-b092-76ac67fa11cb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.407561 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v2psn\" (UniqueName: \"kubernetes.io/projected/879cd6ef-837a-4622-b311-ba67498835f7-kube-api-access-v2psn\") pod \"cert-manager-5b446d88c5-24d25\" (UID: \"879cd6ef-837a-4622-b311-ba67498835f7\") " 
pod="cert-manager/cert-manager-5b446d88c5-24d25" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.408888 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff45c\" (UniqueName: \"kubernetes.io/projected/46746d8f-da2f-4a50-b092-76ac67fa11cb-kube-api-access-ff45c\") pod \"cert-manager-cainjector-7f985d654d-hl8ss\" (UID: \"46746d8f-da2f-4a50-b092-76ac67fa11cb\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.408929 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ww5wh\" (UniqueName: \"kubernetes.io/projected/9a8014e5-4176-4f00-bd52-6cc60d8995b0-kube-api-access-ww5wh\") pod \"cert-manager-webhook-5655c58dd6-8l8xn\" (UID: \"9a8014e5-4176-4f00-bd52-6cc60d8995b0\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.480119 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.506943 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.519954 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-24d25" Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.926991 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-8l8xn"] Oct 03 13:02:58 crc kubenswrapper[4868]: W1003 13:02:58.935784 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a8014e5_4176_4f00_bd52_6cc60d8995b0.slice/crio-220f1185ec4e3415545ac08391d3c08f2273bc17292c6c47b4829da26d4e9ccb WatchSource:0}: Error finding container 220f1185ec4e3415545ac08391d3c08f2273bc17292c6c47b4829da26d4e9ccb: Status 404 returned error can't find the container with id 220f1185ec4e3415545ac08391d3c08f2273bc17292c6c47b4829da26d4e9ccb Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.938817 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.981593 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-24d25"] Oct 03 13:02:58 crc kubenswrapper[4868]: W1003 13:02:58.984535 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod879cd6ef_837a_4622_b311_ba67498835f7.slice/crio-472e5ede372e3d0df7d5897b5bbe3d6f760a33966f1dcda410507ba273f8fb3d WatchSource:0}: Error finding container 472e5ede372e3d0df7d5897b5bbe3d6f760a33966f1dcda410507ba273f8fb3d: Status 404 returned error can't find the container with id 472e5ede372e3d0df7d5897b5bbe3d6f760a33966f1dcda410507ba273f8fb3d Oct 03 13:02:58 crc kubenswrapper[4868]: I1003 13:02:58.987782 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-hl8ss"] Oct 03 13:02:58 crc kubenswrapper[4868]: W1003 13:02:58.988456 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46746d8f_da2f_4a50_b092_76ac67fa11cb.slice/crio-73224e6e9443d60abcf8156e37e458ea6539f8e5fe2f387da1dc1030e1828c28 
Oct 03 13:02:58 crc kubenswrapper[4868]: W1003 13:02:58.988456 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod46746d8f_da2f_4a50_b092_76ac67fa11cb.slice/crio-73224e6e9443d60abcf8156e37e458ea6539f8e5fe2f387da1dc1030e1828c28 WatchSource:0}: Error finding container 73224e6e9443d60abcf8156e37e458ea6539f8e5fe2f387da1dc1030e1828c28: Status 404 returned error can't find the container with id 73224e6e9443d60abcf8156e37e458ea6539f8e5fe2f387da1dc1030e1828c28
Oct 03 13:02:59 crc kubenswrapper[4868]: I1003 13:02:59.453166 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" event={"ID":"9a8014e5-4176-4f00-bd52-6cc60d8995b0","Type":"ContainerStarted","Data":"220f1185ec4e3415545ac08391d3c08f2273bc17292c6c47b4829da26d4e9ccb"}
Oct 03 13:02:59 crc kubenswrapper[4868]: I1003 13:02:59.454293 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-24d25" event={"ID":"879cd6ef-837a-4622-b311-ba67498835f7","Type":"ContainerStarted","Data":"472e5ede372e3d0df7d5897b5bbe3d6f760a33966f1dcda410507ba273f8fb3d"}
Oct 03 13:02:59 crc kubenswrapper[4868]: I1003 13:02:59.455384 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" event={"ID":"46746d8f-da2f-4a50-b092-76ac67fa11cb","Type":"ContainerStarted","Data":"73224e6e9443d60abcf8156e37e458ea6539f8e5fe2f387da1dc1030e1828c28"}
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.145838 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.146251 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.146305 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.146860 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.146914 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13" gracePeriod=600
Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.474771 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13" exitCode=0
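The restart sequence above is the standard kubelet liveness-probe path: prober.go records the failed HTTP GET, the sync loop marks the container unhealthy, and kuberuntime kills it with the pod's grace period. A sketch of a probe matching the logged endpoint; PeriodSeconds and FailureThreshold are illustrative assumptions, not values read from the machine-config-daemon manifest:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// A probe of this shape yields the "Probe failed ... connection refused"
	// entries above once the daemon stops answering on its health port; after
	// FailureThreshold consecutive misses, kubelet kills and restarts the
	// container.
	probe := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    10, // assumed cadence
		FailureThreshold: 3,  // assumed tolerance
	}
	fmt.Printf("GET http://%s:%d%s\n", probe.HTTPGet.Host, probe.HTTPGet.Port.IntValue(), probe.HTTPGet.Path)
}
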
event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13"} Oct 03 13:03:02 crc kubenswrapper[4868]: I1003 13:03:02.474867 4868 scope.go:117] "RemoveContainer" containerID="a6339fe657203e128036640a21a125b1864bd95a4f3ae7f1fb78d282fb38cf9a" Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.486915 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" event={"ID":"46746d8f-da2f-4a50-b092-76ac67fa11cb","Type":"ContainerStarted","Data":"a4d736c8c45115a8b05a6f057c2f9034c490bf155b8195125708298d92f664cd"} Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.489912 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" event={"ID":"9a8014e5-4176-4f00-bd52-6cc60d8995b0","Type":"ContainerStarted","Data":"f942a95dda6b6966c8e82fb3444d74c4787c977a6838a8963b2340470c46a91a"} Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.490116 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.495359 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a"} Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.497373 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-24d25" event={"ID":"879cd6ef-837a-4622-b311-ba67498835f7","Type":"ContainerStarted","Data":"be12dfa446f3140730c6e573d2ee2ecb8e430d95c2c9de3441c8b6fff9238084"} Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.501798 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-hl8ss" podStartSLOduration=1.532183831 podStartE2EDuration="6.501760954s" podCreationTimestamp="2025-10-03 13:02:58 +0000 UTC" firstStartedPulling="2025-10-03 13:02:58.990288997 +0000 UTC m=+775.200138063" lastFinishedPulling="2025-10-03 13:03:03.95986612 +0000 UTC m=+780.169715186" observedRunningTime="2025-10-03 13:03:04.499894006 +0000 UTC m=+780.709743072" watchObservedRunningTime="2025-10-03 13:03:04.501760954 +0000 UTC m=+780.711610030" Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.517978 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" podStartSLOduration=1.484726125 podStartE2EDuration="6.517938488s" podCreationTimestamp="2025-10-03 13:02:58 +0000 UTC" firstStartedPulling="2025-10-03 13:02:58.938397917 +0000 UTC m=+775.148246983" lastFinishedPulling="2025-10-03 13:03:03.97161029 +0000 UTC m=+780.181459346" observedRunningTime="2025-10-03 13:03:04.517030745 +0000 UTC m=+780.726879811" watchObservedRunningTime="2025-10-03 13:03:04.517938488 +0000 UTC m=+780.727787554" Oct 03 13:03:04 crc kubenswrapper[4868]: I1003 13:03:04.557621 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-24d25" podStartSLOduration=1.593084112 podStartE2EDuration="6.557589104s" podCreationTimestamp="2025-10-03 13:02:58 +0000 UTC" firstStartedPulling="2025-10-03 13:02:58.986732516 +0000 UTC m=+775.196581582" lastFinishedPulling="2025-10-03 13:03:03.951237508 +0000 UTC 
m=+780.161086574" observedRunningTime="2025-10-03 13:03:04.552898324 +0000 UTC m=+780.762747390" watchObservedRunningTime="2025-10-03 13:03:04.557589104 +0000 UTC m=+780.767438170" Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.751863 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fgxcz"] Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.752817 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-controller" containerID="cri-o://42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.752937 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-acl-logging" containerID="cri-o://7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.752913 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="nbdb" containerID="cri-o://f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.753039 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-node" containerID="cri-o://6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.753294 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="northd" containerID="cri-o://943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.753353 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="sbdb" containerID="cri-o://1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.753382 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" gracePeriod=30 Oct 03 13:03:08 crc kubenswrapper[4868]: I1003 13:03:08.781679 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" containerID="cri-o://162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" gracePeriod=30 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.089561 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/3.log" Oct 03 13:03:09 crc 
kubenswrapper[4868]: I1003 13:03:09.091921 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovn-acl-logging/0.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.092412 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovn-controller/0.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.093105 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.141956 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5jdd8"] Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142218 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="northd" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142234 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="northd" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142248 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142254 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142260 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142267 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142273 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="sbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142279 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="sbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142289 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142295 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142304 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142310 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142317 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-node" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142323 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" 
containerName="kube-rbac-proxy-node" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142335 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-acl-logging" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142341 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-acl-logging" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142348 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142353 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142362 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kubecfg-setup" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142369 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kubecfg-setup" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142381 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142390 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142403 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="nbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142409 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="nbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142561 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="northd" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142577 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-node" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142589 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-acl-logging" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142601 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142609 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142615 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="nbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142621 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="kube-rbac-proxy-ovn-metrics" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142628 4868 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142634 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovn-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142644 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="sbdb" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.142751 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142759 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.142837 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.143014 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerName="ovnkube-controller" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.144411 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227216 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227311 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227354 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227384 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227455 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227516 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227542 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227589 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227611 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227627 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227642 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227702 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227719 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: 
\"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227783 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227811 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227852 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227861 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227877 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227897 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227932 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227943 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2skbw\" (UniqueName: \"kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227954 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227968 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228008 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch\") pod \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\" (UID: \"46b5db5d-3104-43ab-9ae7-080ec1f50ca9\") " Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228278 4868 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-netd\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228295 4868 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-systemd-units\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228337 4868 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.227974 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228293 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228320 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228675 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228312 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228346 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228431 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228460 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket" (OuterVolumeSpecName: "log-socket") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228756 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-cni-bin". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228760 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash" (OuterVolumeSpecName: "host-slash") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.228760 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log" (OuterVolumeSpecName: "node-log") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.229126 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.235017 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.235276 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw" (OuterVolumeSpecName: "kube-api-access-2skbw") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "kube-api-access-2skbw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.251099 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "46b5db5d-3104-43ab-9ae7-080ec1f50ca9" (UID: "46b5db5d-3104-43ab-9ae7-080ec1f50ca9"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329771 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329812 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-config\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329832 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-node-log\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329854 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-kubelet\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329899 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-systemd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329935 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-netns\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.329961 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-netd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330039 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-ovn\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330117 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-openvswitch\") pod 
\"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330139 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-script-lib\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330153 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kbcv\" (UniqueName: \"kubernetes.io/projected/f4519a23-2717-4e66-863b-92d57ed985e1-kube-api-access-7kbcv\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330182 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-bin\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330202 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330247 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-var-lib-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330261 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-log-socket\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330279 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-etc-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330295 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-systemd-units\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330310 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-slash\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330333 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f4519a23-2717-4e66-863b-92d57ed985e1-ovn-node-metrics-cert\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330356 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-env-overrides\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330395 4868 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330406 4868 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-netns\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330417 4868 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovnkube-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330426 4868 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330434 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330443 4868 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330451 4868 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330459 4868 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-run-systemd\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330467 4868 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-node-log\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc 
kubenswrapper[4868]: I1003 13:03:09.330475 4868 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330484 4868 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-env-overrides\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330494 4868 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-cni-bin\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330501 4868 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-kubelet\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330509 4868 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-log-socket\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330517 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2skbw\" (UniqueName: \"kubernetes.io/projected/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-kube-api-access-2skbw\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330526 4868 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-host-slash\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.330534 4868 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/46b5db5d-3104-43ab-9ae7-080ec1f50ca9-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431377 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-env-overrides\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431454 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431475 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-config\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431492 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: 
\"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-node-log\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431512 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-kubelet\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431530 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-systemd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431552 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-netns\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431574 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-netd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431569 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431595 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-ovn\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431620 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431636 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-systemd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431645 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kbcv\" (UniqueName: \"kubernetes.io/projected/f4519a23-2717-4e66-863b-92d57ed985e1-kube-api-access-7kbcv\") pod \"ovnkube-node-5jdd8\" (UID: 
\"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431672 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-script-lib\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431679 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-netd\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431642 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-run-netns\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431708 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-bin\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431642 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-node-log\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431754 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431764 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-kubelet\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431778 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-cni-bin\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431679 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 
crc kubenswrapper[4868]: I1003 13:03:09.431673 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-run-ovn\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431815 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431855 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-var-lib-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431882 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-log-socket\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431907 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-etc-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431925 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-slash\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431947 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-systemd-units\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431970 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-log-socket\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.431995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f4519a23-2717-4e66-863b-92d57ed985e1-ovn-node-metrics-cert\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432007 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-var-lib-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432027 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-env-overrides\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432118 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-host-slash\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432142 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-etc-openvswitch\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432162 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f4519a23-2717-4e66-863b-92d57ed985e1-systemd-units\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432306 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-config\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.432355 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f4519a23-2717-4e66-863b-92d57ed985e1-ovnkube-script-lib\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.435369 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f4519a23-2717-4e66-863b-92d57ed985e1-ovn-node-metrics-cert\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.446806 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kbcv\" (UniqueName: \"kubernetes.io/projected/f4519a23-2717-4e66-863b-92d57ed985e1-kube-api-access-7kbcv\") pod \"ovnkube-node-5jdd8\" (UID: \"f4519a23-2717-4e66-863b-92d57ed985e1\") " pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.458838 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:09 crc kubenswrapper[4868]: W1003 13:03:09.473522 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4519a23_2717_4e66_863b_92d57ed985e1.slice/crio-59dd5a90ce561f9d0b0517bda728b6031d95032ac1fad470edcad7947d6def0a WatchSource:0}: Error finding container 59dd5a90ce561f9d0b0517bda728b6031d95032ac1fad470edcad7947d6def0a: Status 404 returned error can't find the container with id 59dd5a90ce561f9d0b0517bda728b6031d95032ac1fad470edcad7947d6def0a Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.532134 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"59dd5a90ce561f9d0b0517bda728b6031d95032ac1fad470edcad7947d6def0a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.533756 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jpqwj_61cc9d5b-e515-469c-a472-190ebf3609a3/kube-multus/1.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.534141 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jpqwj_61cc9d5b-e515-469c-a472-190ebf3609a3/kube-multus/0.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.534168 4868 generic.go:334] "Generic (PLEG): container finished" podID="61cc9d5b-e515-469c-a472-190ebf3609a3" containerID="71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252" exitCode=2 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.534206 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerDied","Data":"71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.534228 4868 scope.go:117] "RemoveContainer" containerID="ef48b5c04339a3fdae775e5a995df4ae35a64963e7cd0ac73eaf2c542dbc3233" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.534518 4868 scope.go:117] "RemoveContainer" containerID="71879fd35501740f87a21667de40ba97c097d797c9b89f41dff7ab96b3939252" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.538355 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovnkube-controller/3.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.546818 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovn-acl-logging/0.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.547729 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-fgxcz_46b5db5d-3104-43ab-9ae7-080ec1f50ca9/ovn-controller/0.log" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548153 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548177 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548190 4868 
generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548199 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548208 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548220 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" exitCode=0 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548232 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" exitCode=143 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548242 4868 generic.go:334] "Generic (PLEG): container finished" podID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" exitCode=143 Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548264 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548300 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548320 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548330 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548340 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548267 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548350 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548514 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548546 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548554 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548561 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548568 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548575 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548582 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548595 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548602 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548632 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548653 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548685 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548701 4868 pod_container_deletor.go:114] "Failed 
to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548719 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548725 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548731 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548736 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548741 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548747 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548753 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548765 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548776 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548786 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548793 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548798 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548804 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548810 4868 pod_container_deletor.go:114] "Failed 
to issue the request to remove container" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548817 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548822 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548827 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548833 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548842 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548852 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-fgxcz" event={"ID":"46b5db5d-3104-43ab-9ae7-080ec1f50ca9","Type":"ContainerDied","Data":"afefc4e5b6ee82429e1b712bc5a39a4dcb74af1d209ce8643b62e893dab962d5"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548860 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548866 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548871 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548876 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548880 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548885 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548890 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548895 4868 pod_container_deletor.go:114] "Failed 
to issue the request to remove container" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548900 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.548906 4868 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.571287 4868 scope.go:117] "RemoveContainer" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.589851 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fgxcz"] Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.595574 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-fgxcz"] Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.596662 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.677724 4868 scope.go:117] "RemoveContainer" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.690555 4868 scope.go:117] "RemoveContainer" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.702214 4868 scope.go:117] "RemoveContainer" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.714787 4868 scope.go:117] "RemoveContainer" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.729628 4868 scope.go:117] "RemoveContainer" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.754729 4868 scope.go:117] "RemoveContainer" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.768958 4868 scope.go:117] "RemoveContainer" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.782434 4868 scope.go:117] "RemoveContainer" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.801526 4868 scope.go:117] "RemoveContainer" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.801976 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": container with ID starting with 162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a not found: ID does not exist" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802008 4868 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} err="failed to get container status \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": rpc error: code = NotFound desc = could not find container \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": container with ID starting with 162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802028 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.802540 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": container with ID starting with 4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3 not found: ID does not exist" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802594 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} err="failed to get container status \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": rpc error: code = NotFound desc = could not find container \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": container with ID starting with 4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802629 4868 scope.go:117] "RemoveContainer" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.802944 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": container with ID starting with 1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4 not found: ID does not exist" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802968 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} err="failed to get container status \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": rpc error: code = NotFound desc = could not find container \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": container with ID starting with 1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.802982 4868 scope.go:117] "RemoveContainer" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.803687 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": container with ID starting with f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0 not found: ID does not exist" 
containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.803712 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} err="failed to get container status \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": rpc error: code = NotFound desc = could not find container \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": container with ID starting with f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.803725 4868 scope.go:117] "RemoveContainer" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.804035 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": container with ID starting with 943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131 not found: ID does not exist" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804081 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} err="failed to get container status \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": rpc error: code = NotFound desc = could not find container \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": container with ID starting with 943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804109 4868 scope.go:117] "RemoveContainer" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.804459 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": container with ID starting with 4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06 not found: ID does not exist" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804514 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} err="failed to get container status \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": rpc error: code = NotFound desc = could not find container \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": container with ID starting with 4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804541 4868 scope.go:117] "RemoveContainer" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.804817 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": container with ID starting with 6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67 not found: ID does not exist" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804840 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} err="failed to get container status \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": rpc error: code = NotFound desc = could not find container \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": container with ID starting with 6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.804853 4868 scope.go:117] "RemoveContainer" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.805083 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": container with ID starting with 7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636 not found: ID does not exist" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805109 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} err="failed to get container status \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": rpc error: code = NotFound desc = could not find container \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": container with ID starting with 7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805124 4868 scope.go:117] "RemoveContainer" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: E1003 13:03:09.805619 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": container with ID starting with 42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9 not found: ID does not exist" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805650 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} err="failed to get container status \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": rpc error: code = NotFound desc = could not find container \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": container with ID starting with 42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805665 4868 scope.go:117] "RemoveContainer" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc 
kubenswrapper[4868]: E1003 13:03:09.805945 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": container with ID starting with 2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7 not found: ID does not exist" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805970 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} err="failed to get container status \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": rpc error: code = NotFound desc = could not find container \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": container with ID starting with 2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.805985 4868 scope.go:117] "RemoveContainer" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806268 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} err="failed to get container status \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": rpc error: code = NotFound desc = could not find container \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": container with ID starting with 162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806287 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806593 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} err="failed to get container status \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": rpc error: code = NotFound desc = could not find container \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": container with ID starting with 4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806613 4868 scope.go:117] "RemoveContainer" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806941 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} err="failed to get container status \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": rpc error: code = NotFound desc = could not find container \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": container with ID starting with 1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.806971 4868 scope.go:117] "RemoveContainer" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc 
kubenswrapper[4868]: I1003 13:03:09.807266 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} err="failed to get container status \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": rpc error: code = NotFound desc = could not find container \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": container with ID starting with f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.807289 4868 scope.go:117] "RemoveContainer" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.807601 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} err="failed to get container status \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": rpc error: code = NotFound desc = could not find container \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": container with ID starting with 943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.807623 4868 scope.go:117] "RemoveContainer" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808036 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} err="failed to get container status \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": rpc error: code = NotFound desc = could not find container \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": container with ID starting with 4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808082 4868 scope.go:117] "RemoveContainer" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808371 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} err="failed to get container status \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": rpc error: code = NotFound desc = could not find container \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": container with ID starting with 6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808391 4868 scope.go:117] "RemoveContainer" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808875 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} err="failed to get container status \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": rpc error: code = NotFound desc = could not find container \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": container with ID 
starting with 7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.808895 4868 scope.go:117] "RemoveContainer" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809150 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} err="failed to get container status \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": rpc error: code = NotFound desc = could not find container \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": container with ID starting with 42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809173 4868 scope.go:117] "RemoveContainer" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809422 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} err="failed to get container status \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": rpc error: code = NotFound desc = could not find container \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": container with ID starting with 2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809444 4868 scope.go:117] "RemoveContainer" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809737 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} err="failed to get container status \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": rpc error: code = NotFound desc = could not find container \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": container with ID starting with 162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.809764 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810000 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} err="failed to get container status \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": rpc error: code = NotFound desc = could not find container \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": container with ID starting with 4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810013 4868 scope.go:117] "RemoveContainer" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810327 4868 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} err="failed to get container status \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": rpc error: code = NotFound desc = could not find container \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": container with ID starting with 1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810356 4868 scope.go:117] "RemoveContainer" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810611 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} err="failed to get container status \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": rpc error: code = NotFound desc = could not find container \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": container with ID starting with f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810636 4868 scope.go:117] "RemoveContainer" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810863 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} err="failed to get container status \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": rpc error: code = NotFound desc = could not find container \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": container with ID starting with 943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.810885 4868 scope.go:117] "RemoveContainer" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811102 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} err="failed to get container status \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": rpc error: code = NotFound desc = could not find container \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": container with ID starting with 4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811124 4868 scope.go:117] "RemoveContainer" containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811330 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} err="failed to get container status \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": rpc error: code = NotFound desc = could not find container \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": container with ID starting with 6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67 not found: ID does not exist" Oct 
03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811349 4868 scope.go:117] "RemoveContainer" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811581 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} err="failed to get container status \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": rpc error: code = NotFound desc = could not find container \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": container with ID starting with 7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811611 4868 scope.go:117] "RemoveContainer" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811956 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} err="failed to get container status \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": rpc error: code = NotFound desc = could not find container \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": container with ID starting with 42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.811983 4868 scope.go:117] "RemoveContainer" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.812319 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} err="failed to get container status \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": rpc error: code = NotFound desc = could not find container \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": container with ID starting with 2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.812337 4868 scope.go:117] "RemoveContainer" containerID="162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.812615 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a"} err="failed to get container status \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": rpc error: code = NotFound desc = could not find container \"162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a\": container with ID starting with 162c53b4b557ddd587ee716d22aa4a6216a8f1e22fd495cf778b764ff56e156a not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.812645 4868 scope.go:117] "RemoveContainer" containerID="4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813042 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3"} err="failed to get container status 
\"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": rpc error: code = NotFound desc = could not find container \"4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3\": container with ID starting with 4bf9cae81a2f9b9c0ac15e7b33ed2e8e9a883011fb9d8d715636547346458da3 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813090 4868 scope.go:117] "RemoveContainer" containerID="1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813349 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4"} err="failed to get container status \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": rpc error: code = NotFound desc = could not find container \"1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4\": container with ID starting with 1af218537989e3956fe4d15cdf1e334c1bdc1f90851e47f560f0d852ea1ba2c4 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813368 4868 scope.go:117] "RemoveContainer" containerID="f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813597 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0"} err="failed to get container status \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": rpc error: code = NotFound desc = could not find container \"f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0\": container with ID starting with f4b9127c598a76e7c46b847e85646e67807da2f098a5ef4c8d4604217505a5e0 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813651 4868 scope.go:117] "RemoveContainer" containerID="943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813963 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131"} err="failed to get container status \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": rpc error: code = NotFound desc = could not find container \"943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131\": container with ID starting with 943a592fd2b0772d25f131c1d6db6c7c3b77d693971e418eb04ba4001a9f3131 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.813982 4868 scope.go:117] "RemoveContainer" containerID="4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814262 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06"} err="failed to get container status \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": rpc error: code = NotFound desc = could not find container \"4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06\": container with ID starting with 4cbd6f6d3c975dd6558adba78d57fb1d5ee849c016d81fd48a7f6de67d69db06 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814281 4868 scope.go:117] "RemoveContainer" 
containerID="6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814513 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67"} err="failed to get container status \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": rpc error: code = NotFound desc = could not find container \"6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67\": container with ID starting with 6610c5ba1227d75b808ae1ed25d8d43d0725f3227cc07c5792997ffe06df0c67 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814537 4868 scope.go:117] "RemoveContainer" containerID="7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814878 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636"} err="failed to get container status \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": rpc error: code = NotFound desc = could not find container \"7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636\": container with ID starting with 7aa8f904412b87be00fee9f21fa324a1b56b713595a2555a886509a8c01a3636 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.814898 4868 scope.go:117] "RemoveContainer" containerID="42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.815226 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9"} err="failed to get container status \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": rpc error: code = NotFound desc = could not find container \"42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9\": container with ID starting with 42ba706d3a2fe40b6983e752aec829a8125c075d4e8763cf15af65710bf2d4f9 not found: ID does not exist" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.815247 4868 scope.go:117] "RemoveContainer" containerID="2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7" Oct 03 13:03:09 crc kubenswrapper[4868]: I1003 13:03:09.815569 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7"} err="failed to get container status \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": rpc error: code = NotFound desc = could not find container \"2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7\": container with ID starting with 2609e3916d3e3aafd00ec96b85b7730cf73686311d580cd61e5dab003dad2cd7 not found: ID does not exist" Oct 03 13:03:10 crc kubenswrapper[4868]: I1003 13:03:10.549975 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46b5db5d-3104-43ab-9ae7-080ec1f50ca9" path="/var/lib/kubelet/pods/46b5db5d-3104-43ab-9ae7-080ec1f50ca9/volumes" Oct 03 13:03:10 crc kubenswrapper[4868]: I1003 13:03:10.553814 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jpqwj_61cc9d5b-e515-469c-a472-190ebf3609a3/kube-multus/1.log" Oct 03 13:03:10 crc kubenswrapper[4868]: I1003 13:03:10.553892 4868 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-multus/multus-jpqwj" event={"ID":"61cc9d5b-e515-469c-a472-190ebf3609a3","Type":"ContainerStarted","Data":"22e15cf492995c6a915251dfe385062495cfacdcffb43ddb3255266a6906735c"} Oct 03 13:03:10 crc kubenswrapper[4868]: I1003 13:03:10.556514 4868 generic.go:334] "Generic (PLEG): container finished" podID="f4519a23-2717-4e66-863b-92d57ed985e1" containerID="215321a30eeac683fca0417b00e5f044538c7c334719b33ecd54d2bf00ac743b" exitCode=0 Oct 03 13:03:10 crc kubenswrapper[4868]: I1003 13:03:10.556547 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerDied","Data":"215321a30eeac683fca0417b00e5f044538c7c334719b33ecd54d2bf00ac743b"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.564725 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"58b1e2ea41e4a5c0d89da62a9a1f86b9346044760a839b9c828aa0da9bd4e2b7"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.565086 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"9832e6d93ec3bbac89417630765c4bf38590f9f79393b11dc81c63c54fdb557a"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.565101 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"270d9ef8fad0b24c8eca7c3adde0b83a05d34cf04b783d2346c05f58d8da8a45"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.565112 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"4f8d3820f47109deefbac70ce758c747c235001f922b4ee2622d9ffe30a4e587"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.565124 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"bf38982fdb6e97f42d6045432c1fd09202425d6795820bebfdac648dcff6491d"} Oct 03 13:03:11 crc kubenswrapper[4868]: I1003 13:03:11.565135 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"1e1a990930a7e317cb56bd60ea063131422812e958a0497b9ee97ec81c8cbe23"} Oct 03 13:03:13 crc kubenswrapper[4868]: I1003 13:03:13.510325 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-8l8xn" Oct 03 13:03:13 crc kubenswrapper[4868]: I1003 13:03:13.578615 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"2dc93579aa7af122d3fff93d4360e747bd1de85575b9889c7560d9510aafbb1e"} Oct 03 13:03:16 crc kubenswrapper[4868]: I1003 13:03:16.597657 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" event={"ID":"f4519a23-2717-4e66-863b-92d57ed985e1","Type":"ContainerStarted","Data":"daaabd07936d5db1917f00359195cf31df3334fbc57ef91d224ed08735a4179d"} Oct 03 13:03:16 crc 
kubenswrapper[4868]: I1003 13:03:16.598026 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:16 crc kubenswrapper[4868]: I1003 13:03:16.598045 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:16 crc kubenswrapper[4868]: I1003 13:03:16.644575 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" podStartSLOduration=7.6445574480000005 podStartE2EDuration="7.644557448s" podCreationTimestamp="2025-10-03 13:03:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:03:16.640007082 +0000 UTC m=+792.849856158" watchObservedRunningTime="2025-10-03 13:03:16.644557448 +0000 UTC m=+792.854406514" Oct 03 13:03:16 crc kubenswrapper[4868]: I1003 13:03:16.694640 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:17 crc kubenswrapper[4868]: I1003 13:03:17.604405 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:17 crc kubenswrapper[4868]: I1003 13:03:17.648735 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:39 crc kubenswrapper[4868]: I1003 13:03:39.480652 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5jdd8" Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.876222 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.877720 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.885583 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.970665 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.970759 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:48 crc kubenswrapper[4868]: I1003 13:03:48.970858 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54mpj\" (UniqueName: \"kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.071788 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54mpj\" (UniqueName: \"kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.071853 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.071935 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.072480 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.072503 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.094274 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-54mpj\" (UniqueName: \"kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj\") pod \"community-operators-zpf4m\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.211887 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.677571 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:03:49 crc kubenswrapper[4868]: I1003 13:03:49.813163 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerStarted","Data":"420abd10128928d062bedd9596b01bcffd3d98d277dfe8d04cc80d8807b19ef6"} Oct 03 13:03:50 crc kubenswrapper[4868]: I1003 13:03:50.820268 4868 generic.go:334] "Generic (PLEG): container finished" podID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerID="4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5" exitCode=0 Oct 03 13:03:50 crc kubenswrapper[4868]: I1003 13:03:50.820520 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerDied","Data":"4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5"} Oct 03 13:03:51 crc kubenswrapper[4868]: I1003 13:03:51.831475 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerStarted","Data":"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a"} Oct 03 13:03:52 crc kubenswrapper[4868]: I1003 13:03:52.844672 4868 generic.go:334] "Generic (PLEG): container finished" podID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerID="e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a" exitCode=0 Oct 03 13:03:52 crc kubenswrapper[4868]: I1003 13:03:52.844707 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerDied","Data":"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a"} Oct 03 13:03:53 crc kubenswrapper[4868]: I1003 13:03:53.852007 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerStarted","Data":"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64"} Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.696450 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-zpf4m" podStartSLOduration=7.256611278 podStartE2EDuration="9.696429181s" podCreationTimestamp="2025-10-03 13:03:48 +0000 UTC" firstStartedPulling="2025-10-03 13:03:50.82265756 +0000 UTC m=+827.032506626" lastFinishedPulling="2025-10-03 13:03:53.262475463 +0000 UTC m=+829.472324529" observedRunningTime="2025-10-03 13:03:53.870604354 +0000 UTC m=+830.080453440" watchObservedRunningTime="2025-10-03 13:03:57.696429181 +0000 UTC m=+833.906278257" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.699879 4868 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv"] Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.701214 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.706078 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.709569 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv"] Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.784511 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.784864 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vdrq6\" (UniqueName: \"kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.784899 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.885761 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.885841 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.885882 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vdrq6\" (UniqueName: \"kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " 
pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.886252 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.886551 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:57 crc kubenswrapper[4868]: I1003 13:03:57.905600 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vdrq6\" (UniqueName: \"kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6\") pod \"fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:58 crc kubenswrapper[4868]: I1003 13:03:58.017504 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:03:58 crc kubenswrapper[4868]: I1003 13:03:58.408219 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv"] Oct 03 13:03:58 crc kubenswrapper[4868]: I1003 13:03:58.877503 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerStarted","Data":"8c8cf47d81e95251dc34bf8df7e60534731ccece8be9f48a31759ea250c328e3"} Oct 03 13:03:58 crc kubenswrapper[4868]: I1003 13:03:58.877563 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerStarted","Data":"ec68405559bae354e2ac325252ec7eead4b1665990a2acba566c27901db31312"} Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.213022 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.213104 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.262330 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.885502 4868 generic.go:334] "Generic (PLEG): container finished" podID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerID="8c8cf47d81e95251dc34bf8df7e60534731ccece8be9f48a31759ea250c328e3" exitCode=0 Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.885566 4868 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerDied","Data":"8c8cf47d81e95251dc34bf8df7e60534731ccece8be9f48a31759ea250c328e3"} Oct 03 13:03:59 crc kubenswrapper[4868]: I1003 13:03:59.926666 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.260081 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.271514 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.280001 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.417725 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-682st\" (UniqueName: \"kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.417878 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.417973 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.519389 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.519506 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-682st\" (UniqueName: \"kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.519565 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.520086 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.520086 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.537868 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-682st\" (UniqueName: \"kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st\") pod \"redhat-operators-f6jb7\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.598405 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.815554 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:00 crc kubenswrapper[4868]: I1003 13:04:00.893216 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerStarted","Data":"550314169d8df16f665f82d6519a6a09eafa1d765c317c4856287f46eed841ec"} Oct 03 13:04:01 crc kubenswrapper[4868]: I1003 13:04:01.898525 4868 generic.go:334] "Generic (PLEG): container finished" podID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerID="b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e" exitCode=0 Oct 03 13:04:01 crc kubenswrapper[4868]: I1003 13:04:01.898572 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerDied","Data":"b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e"} Oct 03 13:04:02 crc kubenswrapper[4868]: I1003 13:04:02.851254 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:04:02 crc kubenswrapper[4868]: I1003 13:04:02.851956 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zpf4m" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="registry-server" containerID="cri-o://cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64" gracePeriod=2 Oct 03 13:04:02 crc kubenswrapper[4868]: I1003 13:04:02.906207 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerStarted","Data":"c1974dc22e0acb10ba8a3a8514cfbebf0c314a1ae3eec6c760915474abc0ab8c"} Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.219201 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.353277 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content\") pod \"47c25bfa-f126-4dcd-84c0-2bea653db9df\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.353415 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities\") pod \"47c25bfa-f126-4dcd-84c0-2bea653db9df\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.353454 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54mpj\" (UniqueName: \"kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj\") pod \"47c25bfa-f126-4dcd-84c0-2bea653db9df\" (UID: \"47c25bfa-f126-4dcd-84c0-2bea653db9df\") " Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.354193 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities" (OuterVolumeSpecName: "utilities") pod "47c25bfa-f126-4dcd-84c0-2bea653db9df" (UID: "47c25bfa-f126-4dcd-84c0-2bea653db9df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.364194 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj" (OuterVolumeSpecName: "kube-api-access-54mpj") pod "47c25bfa-f126-4dcd-84c0-2bea653db9df" (UID: "47c25bfa-f126-4dcd-84c0-2bea653db9df"). InnerVolumeSpecName "kube-api-access-54mpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.401195 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "47c25bfa-f126-4dcd-84c0-2bea653db9df" (UID: "47c25bfa-f126-4dcd-84c0-2bea653db9df"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.454751 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54mpj\" (UniqueName: \"kubernetes.io/projected/47c25bfa-f126-4dcd-84c0-2bea653db9df-kube-api-access-54mpj\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.454793 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.454806 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/47c25bfa-f126-4dcd-84c0-2bea653db9df-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:03 crc kubenswrapper[4868]: E1003 13:04:03.768778 4868 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5056916_3ba0_49fb_951c_f1f14ad238d4.slice/crio-95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5056916_3ba0_49fb_951c_f1f14ad238d4.slice/crio-conmon-95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab.scope\": RecentStats: unable to find data in memory cache]" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.914190 4868 generic.go:334] "Generic (PLEG): container finished" podID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerID="cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64" exitCode=0 Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.914267 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-zpf4m" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.914286 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerDied","Data":"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64"} Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.914753 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zpf4m" event={"ID":"47c25bfa-f126-4dcd-84c0-2bea653db9df","Type":"ContainerDied","Data":"420abd10128928d062bedd9596b01bcffd3d98d277dfe8d04cc80d8807b19ef6"} Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.914773 4868 scope.go:117] "RemoveContainer" containerID="cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.918274 4868 generic.go:334] "Generic (PLEG): container finished" podID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerID="95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab" exitCode=0 Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.918368 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerDied","Data":"95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab"} Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.921474 4868 generic.go:334] "Generic (PLEG): container finished" podID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerID="c1974dc22e0acb10ba8a3a8514cfbebf0c314a1ae3eec6c760915474abc0ab8c" exitCode=0 Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.921686 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerDied","Data":"c1974dc22e0acb10ba8a3a8514cfbebf0c314a1ae3eec6c760915474abc0ab8c"} Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.939312 4868 scope.go:117] "RemoveContainer" containerID="e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.966497 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.969682 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zpf4m"] Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.972332 4868 scope.go:117] "RemoveContainer" containerID="4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.992026 4868 scope.go:117] "RemoveContainer" containerID="cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64" Oct 03 13:04:03 crc kubenswrapper[4868]: E1003 13:04:03.992472 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64\": container with ID starting with cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64 not found: ID does not exist" containerID="cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.992502 4868 pod_container_deletor.go:53] "DeleteContainer 
returned error" containerID={"Type":"cri-o","ID":"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64"} err="failed to get container status \"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64\": rpc error: code = NotFound desc = could not find container \"cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64\": container with ID starting with cb4b2994bce7b9000cea4f4220cd1fec0eff2a647a086730ce19e10289ae5b64 not found: ID does not exist" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.992524 4868 scope.go:117] "RemoveContainer" containerID="e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a" Oct 03 13:04:03 crc kubenswrapper[4868]: E1003 13:04:03.992832 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a\": container with ID starting with e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a not found: ID does not exist" containerID="e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.992852 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a"} err="failed to get container status \"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a\": rpc error: code = NotFound desc = could not find container \"e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a\": container with ID starting with e47757923fd22c1e2494917921ea74e7aae4e8d198d4a568e6347efb6130a56a not found: ID does not exist" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.992864 4868 scope.go:117] "RemoveContainer" containerID="4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5" Oct 03 13:04:03 crc kubenswrapper[4868]: E1003 13:04:03.993415 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5\": container with ID starting with 4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5 not found: ID does not exist" containerID="4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5" Oct 03 13:04:03 crc kubenswrapper[4868]: I1003 13:04:03.993438 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5"} err="failed to get container status \"4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5\": rpc error: code = NotFound desc = could not find container \"4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5\": container with ID starting with 4bf227e5fbc1ba4a67af66e16bc6f9de8d96dfaebffa11b289f00e7a72649cd5 not found: ID does not exist" Oct 03 13:04:04 crc kubenswrapper[4868]: I1003 13:04:04.550778 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" path="/var/lib/kubelet/pods/47c25bfa-f126-4dcd-84c0-2bea653db9df/volumes" Oct 03 13:04:04 crc kubenswrapper[4868]: I1003 13:04:04.930805 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerStarted","Data":"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883"} Oct 03 13:04:04 crc 
kubenswrapper[4868]: I1003 13:04:04.932788 4868 generic.go:334] "Generic (PLEG): container finished" podID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerID="1e023f90159c9504ae984a48f24462a984084f74ddca3845654aa7a154b450cc" exitCode=0 Oct 03 13:04:04 crc kubenswrapper[4868]: I1003 13:04:04.940297 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerDied","Data":"1e023f90159c9504ae984a48f24462a984084f74ddca3845654aa7a154b450cc"} Oct 03 13:04:04 crc kubenswrapper[4868]: I1003 13:04:04.950952 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-f6jb7" podStartSLOduration=2.383128445 podStartE2EDuration="4.950935018s" podCreationTimestamp="2025-10-03 13:04:00 +0000 UTC" firstStartedPulling="2025-10-03 13:04:01.900069568 +0000 UTC m=+838.109918634" lastFinishedPulling="2025-10-03 13:04:04.467876141 +0000 UTC m=+840.677725207" observedRunningTime="2025-10-03 13:04:04.947404458 +0000 UTC m=+841.157253544" watchObservedRunningTime="2025-10-03 13:04:04.950935018 +0000 UTC m=+841.160784084" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.176750 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.287731 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle\") pod \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.287801 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util\") pod \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.287895 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vdrq6\" (UniqueName: \"kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6\") pod \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\" (UID: \"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120\") " Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.288371 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle" (OuterVolumeSpecName: "bundle") pod "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" (UID: "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.293291 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6" (OuterVolumeSpecName: "kube-api-access-vdrq6") pod "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" (UID: "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120"). InnerVolumeSpecName "kube-api-access-vdrq6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.298884 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util" (OuterVolumeSpecName: "util") pod "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" (UID: "a1d6ff21-7d1b-46b1-9b66-4b15fcf46120"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.388967 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vdrq6\" (UniqueName: \"kubernetes.io/projected/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-kube-api-access-vdrq6\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.389014 4868 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.389024 4868 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a1d6ff21-7d1b-46b1-9b66-4b15fcf46120-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.954042 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" event={"ID":"a1d6ff21-7d1b-46b1-9b66-4b15fcf46120","Type":"ContainerDied","Data":"ec68405559bae354e2ac325252ec7eead4b1665990a2acba566c27901db31312"} Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.954414 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ec68405559bae354e2ac325252ec7eead4b1665990a2acba566c27901db31312" Oct 03 13:04:06 crc kubenswrapper[4868]: I1003 13:04:06.954108 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434404 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7"] Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434700 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="extract-utilities" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434715 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="extract-utilities" Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434724 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="extract" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434730 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="extract" Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434742 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="pull" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434748 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="pull" Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434759 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="util" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434765 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="util" Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434773 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="extract-content" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434779 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="extract-content" Oct 03 13:04:08 crc kubenswrapper[4868]: E1003 13:04:08.434786 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="registry-server" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434791 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="registry-server" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434882 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="47c25bfa-f126-4dcd-84c0-2bea653db9df" containerName="registry-server" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.434901 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1d6ff21-7d1b-46b1-9b66-4b15fcf46120" containerName="extract" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.435401 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.437661 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-jj9sb" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.438071 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.438665 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.448516 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7"] Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.515338 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdkww\" (UniqueName: \"kubernetes.io/projected/2784c314-41c6-4132-a03d-b54844ffd96e-kube-api-access-mdkww\") pod \"nmstate-operator-858ddd8f98-7zvh7\" (UID: \"2784c314-41c6-4132-a03d-b54844ffd96e\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.616607 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdkww\" (UniqueName: \"kubernetes.io/projected/2784c314-41c6-4132-a03d-b54844ffd96e-kube-api-access-mdkww\") pod \"nmstate-operator-858ddd8f98-7zvh7\" (UID: \"2784c314-41c6-4132-a03d-b54844ffd96e\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.633913 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdkww\" (UniqueName: \"kubernetes.io/projected/2784c314-41c6-4132-a03d-b54844ffd96e-kube-api-access-mdkww\") pod \"nmstate-operator-858ddd8f98-7zvh7\" (UID: \"2784c314-41c6-4132-a03d-b54844ffd96e\") " pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" Oct 03 13:04:08 crc kubenswrapper[4868]: I1003 13:04:08.751537 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" Oct 03 13:04:09 crc kubenswrapper[4868]: I1003 13:04:09.201295 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7"] Oct 03 13:04:09 crc kubenswrapper[4868]: W1003 13:04:09.205385 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2784c314_41c6_4132_a03d_b54844ffd96e.slice/crio-2becbaf3415e068ab5d3ecf77464213fe707556ef240035439a3dd2621b545bb WatchSource:0}: Error finding container 2becbaf3415e068ab5d3ecf77464213fe707556ef240035439a3dd2621b545bb: Status 404 returned error can't find the container with id 2becbaf3415e068ab5d3ecf77464213fe707556ef240035439a3dd2621b545bb Oct 03 13:04:09 crc kubenswrapper[4868]: I1003 13:04:09.972376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" event={"ID":"2784c314-41c6-4132-a03d-b54844ffd96e","Type":"ContainerStarted","Data":"2becbaf3415e068ab5d3ecf77464213fe707556ef240035439a3dd2621b545bb"} Oct 03 13:04:10 crc kubenswrapper[4868]: I1003 13:04:10.598905 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:10 crc kubenswrapper[4868]: I1003 13:04:10.598964 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:10 crc kubenswrapper[4868]: I1003 13:04:10.639939 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:11 crc kubenswrapper[4868]: I1003 13:04:11.014040 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:12 crc kubenswrapper[4868]: I1003 13:04:12.850577 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:12 crc kubenswrapper[4868]: I1003 13:04:12.988188 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-f6jb7" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="registry-server" containerID="cri-o://180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883" gracePeriod=2 Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.601454 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.684463 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content\") pod \"b5056916-3ba0-49fb-951c-f1f14ad238d4\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.684538 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities\") pod \"b5056916-3ba0-49fb-951c-f1f14ad238d4\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.684649 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-682st\" (UniqueName: \"kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st\") pod \"b5056916-3ba0-49fb-951c-f1f14ad238d4\" (UID: \"b5056916-3ba0-49fb-951c-f1f14ad238d4\") " Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.685558 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities" (OuterVolumeSpecName: "utilities") pod "b5056916-3ba0-49fb-951c-f1f14ad238d4" (UID: "b5056916-3ba0-49fb-951c-f1f14ad238d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.694265 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st" (OuterVolumeSpecName: "kube-api-access-682st") pod "b5056916-3ba0-49fb-951c-f1f14ad238d4" (UID: "b5056916-3ba0-49fb-951c-f1f14ad238d4"). InnerVolumeSpecName "kube-api-access-682st". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.764073 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b5056916-3ba0-49fb-951c-f1f14ad238d4" (UID: "b5056916-3ba0-49fb-951c-f1f14ad238d4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.785740 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-682st\" (UniqueName: \"kubernetes.io/projected/b5056916-3ba0-49fb-951c-f1f14ad238d4-kube-api-access-682st\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.785783 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.785793 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b5056916-3ba0-49fb-951c-f1f14ad238d4-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.995528 4868 generic.go:334] "Generic (PLEG): container finished" podID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerID="180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883" exitCode=0 Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.995607 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerDied","Data":"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883"} Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.995639 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-f6jb7" event={"ID":"b5056916-3ba0-49fb-951c-f1f14ad238d4","Type":"ContainerDied","Data":"550314169d8df16f665f82d6519a6a09eafa1d765c317c4856287f46eed841ec"} Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.995660 4868 scope.go:117] "RemoveContainer" containerID="180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.996577 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-f6jb7" Oct 03 13:04:13 crc kubenswrapper[4868]: I1003 13:04:13.997608 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" event={"ID":"2784c314-41c6-4132-a03d-b54844ffd96e","Type":"ContainerStarted","Data":"fd155a690c8e84bedc05f264793959bc95713feee1412fad77d98a23840a0ef7"} Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.010428 4868 scope.go:117] "RemoveContainer" containerID="95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.024371 4868 scope.go:117] "RemoveContainer" containerID="b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.024619 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-858ddd8f98-7zvh7" podStartSLOduration=1.662531129 podStartE2EDuration="6.024602782s" podCreationTimestamp="2025-10-03 13:04:08 +0000 UTC" firstStartedPulling="2025-10-03 13:04:09.207511361 +0000 UTC m=+845.417360427" lastFinishedPulling="2025-10-03 13:04:13.569583004 +0000 UTC m=+849.779432080" observedRunningTime="2025-10-03 13:04:14.020344268 +0000 UTC m=+850.230193324" watchObservedRunningTime="2025-10-03 13:04:14.024602782 +0000 UTC m=+850.234451848" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.036492 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.039732 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-f6jb7"] Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.048579 4868 scope.go:117] "RemoveContainer" containerID="180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883" Oct 03 13:04:14 crc kubenswrapper[4868]: E1003 13:04:14.049241 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883\": container with ID starting with 180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883 not found: ID does not exist" containerID="180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.049297 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883"} err="failed to get container status \"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883\": rpc error: code = NotFound desc = could not find container \"180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883\": container with ID starting with 180636497a9539256007aa0f956259cb07ed795fbd6085e65fa0a0f68b8cf883 not found: ID does not exist" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.049325 4868 scope.go:117] "RemoveContainer" containerID="95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab" Oct 03 13:04:14 crc kubenswrapper[4868]: E1003 13:04:14.049690 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab\": container with ID starting with 95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab not found: ID does not exist" 
containerID="95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.049739 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab"} err="failed to get container status \"95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab\": rpc error: code = NotFound desc = could not find container \"95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab\": container with ID starting with 95bfc207482580650383ad803c636bb6ef6e5e875ae810905965ec735c1e5bab not found: ID does not exist" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.049765 4868 scope.go:117] "RemoveContainer" containerID="b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e" Oct 03 13:04:14 crc kubenswrapper[4868]: E1003 13:04:14.050215 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e\": container with ID starting with b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e not found: ID does not exist" containerID="b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.050243 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e"} err="failed to get container status \"b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e\": rpc error: code = NotFound desc = could not find container \"b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e\": container with ID starting with b6902adc922e4965b319d6153d7c30e26c2efe6ea6ae22e2f9e225702adea13e not found: ID does not exist" Oct 03 13:04:14 crc kubenswrapper[4868]: I1003 13:04:14.567973 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" path="/var/lib/kubelet/pods/b5056916-3ba0-49fb-951c-f1f14ad238d4/volumes" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.038705 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q"] Oct 03 13:04:18 crc kubenswrapper[4868]: E1003 13:04:18.039422 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="extract-content" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.039445 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="extract-content" Oct 03 13:04:18 crc kubenswrapper[4868]: E1003 13:04:18.039478 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="extract-utilities" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.039488 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="extract-utilities" Oct 03 13:04:18 crc kubenswrapper[4868]: E1003 13:04:18.039501 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="registry-server" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.039511 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b5056916-3ba0-49fb-951c-f1f14ad238d4" containerName="registry-server" Oct 03 13:04:18 crc 
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.040583 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.042604 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-k4mpp"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.050345 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"]
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.051201 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.054204 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.054528 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q"]
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.064417 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"]
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.082494 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-nznm4"]
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.083404 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-nznm4"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.144242 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.144514 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jzvhg\" (UniqueName: \"kubernetes.io/projected/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-kube-api-access-jzvhg\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.144680 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6vngz\" (UniqueName: \"kubernetes.io/projected/6f27da3a-f372-4a3f-a88d-a99db0ade467-kube-api-access-6vngz\") pod \"nmstate-metrics-fdff9cb8d-vlb8q\" (UID: \"6f27da3a-f372-4a3f-a88d-a99db0ade467\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q"
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.183664 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp"]
Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.184539 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp"
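The cpu_manager/memory_manager "RemoveStaleState" and "Deleted CPUSet assignment" entries above are admission-time housekeeping: as the nmstate pods are ADDed, resource-manager state still keyed to the deleted redhat-operators-f6jb7 pod UID is purged. A toy illustration of such a sweep; the types and the second container name are invented for the example, and this is not kubelet's actual data structure:

package main

import "fmt"

// key identifies per-container resource-manager state, mirroring the
// podUID/containerName pairs in the log lines above.
type key struct{ podUID, container string }

// removeStaleState drops state belonging to pods the kubelet no longer
// tracks. Toy code: kubelet's cpu/memory managers keep richer state.
func removeStaleState(state map[key]string, active map[string]bool) {
	for k := range state {
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.container)
			delete(state, k)
		}
	}
}

func main() {
	state := map[key]string{
		{"b5056916-3ba0-49fb-951c-f1f14ad238d4", "registry-server"}: "cpuset A", // deleted catalog pod
		{"2784c314-41c6-4132-a03d-b54844ffd96e", "manager"}:         "cpuset B", // container name invented
	}
	active := map[string]bool{"2784c314-41c6-4132-a03d-b54844ffd96e": true}
	removeStaleState(state, active)
}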
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.189666 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.189699 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-7cnm8" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.189778 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.226025 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp"] Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246448 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-dbus-socket\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246529 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jzvhg\" (UniqueName: \"kubernetes.io/projected/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-kube-api-access-jzvhg\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246710 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6vngz\" (UniqueName: \"kubernetes.io/projected/6f27da3a-f372-4a3f-a88d-a99db0ade467-kube-api-access-6vngz\") pod \"nmstate-metrics-fdff9cb8d-vlb8q\" (UID: \"6f27da3a-f372-4a3f-a88d-a99db0ade467\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246813 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246857 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-nmstate-lock\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.246881 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pj677\" (UniqueName: \"kubernetes.io/projected/a3945860-da06-49b9-b1fb-824d976dbcb5-kube-api-access-pj677\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.247247 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-ovs-socket\") pod \"nmstate-handler-nznm4\" (UID: 
\"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.255974 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-tls-key-pair\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.263611 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jzvhg\" (UniqueName: \"kubernetes.io/projected/55f22459-1eaa-4ce8-bd82-6b3c62c57d80-kube-api-access-jzvhg\") pod \"nmstate-webhook-6cdbc54649-ksvpj\" (UID: \"55f22459-1eaa-4ce8-bd82-6b3c62c57d80\") " pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.263647 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6vngz\" (UniqueName: \"kubernetes.io/projected/6f27da3a-f372-4a3f-a88d-a99db0ade467-kube-api-access-6vngz\") pod \"nmstate-metrics-fdff9cb8d-vlb8q\" (UID: \"6f27da3a-f372-4a3f-a88d-a99db0ade467\") " pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.348931 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-nmstate-lock\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.348977 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pj677\" (UniqueName: \"kubernetes.io/projected/a3945860-da06-49b9-b1fb-824d976dbcb5-kube-api-access-pj677\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349004 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349047 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349091 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-ovs-socket\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349130 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: 
\"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-dbus-socket\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349163 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fd2gr\" (UniqueName: \"kubernetes.io/projected/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-kube-api-access-fd2gr\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349301 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-ovs-socket\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349449 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-nmstate-lock\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.349533 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/a3945860-da06-49b9-b1fb-824d976dbcb5-dbus-socket\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.355916 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.368864 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pj677\" (UniqueName: \"kubernetes.io/projected/a3945860-da06-49b9-b1fb-824d976dbcb5-kube-api-access-pj677\") pod \"nmstate-handler-nznm4\" (UID: \"a3945860-da06-49b9-b1fb-824d976dbcb5\") " pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.371347 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.372882 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-67c7449f96-8675g"] Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.373725 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.393082 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-67c7449f96-8675g"] Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.410867 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450159 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-oauth-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450258 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fd2gr\" (UniqueName: \"kubernetes.io/projected/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-kube-api-access-fd2gr\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450285 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450309 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-service-ca\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450333 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-console-config\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450364 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450403 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450429 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcpfm\" (UniqueName: \"kubernetes.io/projected/4488d535-d535-40d6-a68a-b8a76bbbb480-kube-api-access-jcpfm\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450479 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-oauth-config\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.450514 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-trusted-ca-bundle\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.451491 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-nginx-conf\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.455140 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-plugin-serving-cert\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.471640 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fd2gr\" (UniqueName: \"kubernetes.io/projected/e0c4f245-e33e-43f7-9a23-dd6de5ffd531-kube-api-access-fd2gr\") pod \"nmstate-console-plugin-6b874cbd85-tjlzp\" (UID: \"e0c4f245-e33e-43f7-9a23-dd6de5ffd531\") " pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.503566 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.551174 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-oauth-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.552131 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.552080 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-oauth-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.552254 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-service-ca\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.552277 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-console-config\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.553037 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcpfm\" (UniqueName: \"kubernetes.io/projected/4488d535-d535-40d6-a68a-b8a76bbbb480-kube-api-access-jcpfm\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.553108 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-oauth-config\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.553184 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-trusted-ca-bundle\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.553878 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-console-config\") pod \"console-67c7449f96-8675g\" (UID: 
\"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.553998 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-service-ca\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.554525 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4488d535-d535-40d6-a68a-b8a76bbbb480-trusted-ca-bundle\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.557711 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-serving-cert\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.565268 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/4488d535-d535-40d6-a68a-b8a76bbbb480-console-oauth-config\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.570278 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcpfm\" (UniqueName: \"kubernetes.io/projected/4488d535-d535-40d6-a68a-b8a76bbbb480-kube-api-access-jcpfm\") pod \"console-67c7449f96-8675g\" (UID: \"4488d535-d535-40d6-a68a-b8a76bbbb480\") " pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.713176 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp"] Oct 03 13:04:18 crc kubenswrapper[4868]: W1003 13:04:18.719514 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode0c4f245_e33e_43f7_9a23_dd6de5ffd531.slice/crio-d7ff680063b15cf69d33f9821e3b5844f9d0ac4fd3c8653411dbb64c2f7c2cc5 WatchSource:0}: Error finding container d7ff680063b15cf69d33f9821e3b5844f9d0ac4fd3c8653411dbb64c2f7c2cc5: Status 404 returned error can't find the container with id d7ff680063b15cf69d33f9821e3b5844f9d0ac4fd3c8653411dbb64c2f7c2cc5 Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.755284 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.790491 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q"] Oct 03 13:04:18 crc kubenswrapper[4868]: W1003 13:04:18.797596 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f27da3a_f372_4a3f_a88d_a99db0ade467.slice/crio-7a30b015fcc334b6a66a37743cd2e6d59ecd9ff578d2c4a129c22e06adfb1dcf WatchSource:0}: Error finding container 7a30b015fcc334b6a66a37743cd2e6d59ecd9ff578d2c4a129c22e06adfb1dcf: Status 404 returned error can't find the container with id 7a30b015fcc334b6a66a37743cd2e6d59ecd9ff578d2c4a129c22e06adfb1dcf Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.859034 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"] Oct 03 13:04:18 crc kubenswrapper[4868]: W1003 13:04:18.864237 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55f22459_1eaa_4ce8_bd82_6b3c62c57d80.slice/crio-e2dec5a04176fb9dbc24c8bf3c12c99c3e706c89d51dd7349d5e3143a6731d3b WatchSource:0}: Error finding container e2dec5a04176fb9dbc24c8bf3c12c99c3e706c89d51dd7349d5e3143a6731d3b: Status 404 returned error can't find the container with id e2dec5a04176fb9dbc24c8bf3c12c99c3e706c89d51dd7349d5e3143a6731d3b Oct 03 13:04:18 crc kubenswrapper[4868]: I1003 13:04:18.956712 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-67c7449f96-8675g"] Oct 03 13:04:18 crc kubenswrapper[4868]: W1003 13:04:18.979093 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4488d535_d535_40d6_a68a_b8a76bbbb480.slice/crio-2bdc8d7da9b6320fb6fe403fd7a85e122f10ae5e780dce7743cd0b6a188c3c0e WatchSource:0}: Error finding container 2bdc8d7da9b6320fb6fe403fd7a85e122f10ae5e780dce7743cd0b6a188c3c0e: Status 404 returned error can't find the container with id 2bdc8d7da9b6320fb6fe403fd7a85e122f10ae5e780dce7743cd0b6a188c3c0e Oct 03 13:04:19 crc kubenswrapper[4868]: I1003 13:04:19.031792 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" event={"ID":"55f22459-1eaa-4ce8-bd82-6b3c62c57d80","Type":"ContainerStarted","Data":"e2dec5a04176fb9dbc24c8bf3c12c99c3e706c89d51dd7349d5e3143a6731d3b"} Oct 03 13:04:19 crc kubenswrapper[4868]: I1003 13:04:19.032708 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-nznm4" event={"ID":"a3945860-da06-49b9-b1fb-824d976dbcb5","Type":"ContainerStarted","Data":"f34011f63e1450b2ada2288e8456ffc9ac4469c801cdf0aa33799fc93372fe19"} Oct 03 13:04:19 crc kubenswrapper[4868]: I1003 13:04:19.033850 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-67c7449f96-8675g" event={"ID":"4488d535-d535-40d6-a68a-b8a76bbbb480","Type":"ContainerStarted","Data":"2bdc8d7da9b6320fb6fe403fd7a85e122f10ae5e780dce7743cd0b6a188c3c0e"} Oct 03 13:04:19 crc kubenswrapper[4868]: I1003 13:04:19.034900 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" event={"ID":"6f27da3a-f372-4a3f-a88d-a99db0ade467","Type":"ContainerStarted","Data":"7a30b015fcc334b6a66a37743cd2e6d59ecd9ff578d2c4a129c22e06adfb1dcf"} Oct 03 13:04:19 crc kubenswrapper[4868]: I1003 
Oct 03 13:04:20 crc kubenswrapper[4868]: I1003 13:04:20.042300 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-67c7449f96-8675g" event={"ID":"4488d535-d535-40d6-a68a-b8a76bbbb480","Type":"ContainerStarted","Data":"a4ba6abfb03b17dc00dfc4d758aa5e470fb6d4d5d5d7c6226fa1d7bd9adf0b4a"}
Oct 03 13:04:20 crc kubenswrapper[4868]: I1003 13:04:20.062478 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-67c7449f96-8675g" podStartSLOduration=2.062456099 podStartE2EDuration="2.062456099s" podCreationTimestamp="2025-10-03 13:04:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:04:20.060865116 +0000 UTC m=+856.270714202" watchObservedRunningTime="2025-10-03 13:04:20.062456099 +0000 UTC m=+856.272305165"
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.059418 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" event={"ID":"6f27da3a-f372-4a3f-a88d-a99db0ade467","Type":"ContainerStarted","Data":"828b2d0a57953aa59414d2109eb17b5a7c93d60abd54fd5a8b070b27c36e519c"}
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.061044 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" event={"ID":"e0c4f245-e33e-43f7-9a23-dd6de5ffd531","Type":"ContainerStarted","Data":"64d4213f74de780adae1e69bd649ecee562ce536b6c9a8db8839946a0ca7b488"}
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.062387 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" event={"ID":"55f22459-1eaa-4ce8-bd82-6b3c62c57d80","Type":"ContainerStarted","Data":"5958a4efb59eb9f5839515de3e5ac9668394cc78e4df1a1d2f83b82276d5b002"}
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.062597 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj"
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.079273 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-6b874cbd85-tjlzp" podStartSLOduration=1.739612524 podStartE2EDuration="5.079251815s" podCreationTimestamp="2025-10-03 13:04:18 +0000 UTC" firstStartedPulling="2025-10-03 13:04:18.721493839 +0000 UTC m=+854.931342905" lastFinishedPulling="2025-10-03 13:04:22.06113312 +0000 UTC m=+858.270982196" observedRunningTime="2025-10-03 13:04:23.075264128 +0000 UTC m=+859.285113194" watchObservedRunningTime="2025-10-03 13:04:23.079251815 +0000 UTC m=+859.289100881"
Oct 03 13:04:23 crc kubenswrapper[4868]: I1003 13:04:23.099431 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" podStartSLOduration=1.904442563 podStartE2EDuration="5.099412535s" podCreationTimestamp="2025-10-03 13:04:18 +0000 UTC" firstStartedPulling="2025-10-03 13:04:18.866209559 +0000 UTC m=+855.076058625" lastFinishedPulling="2025-10-03 13:04:22.061179531 +0000 UTC m=+858.271028597" observedRunningTime="2025-10-03 13:04:23.096678662 +0000 UTC m=+859.306527728" watchObservedRunningTime="2025-10-03 13:04:23.099412535 +0000 UTC m=+859.309261601"
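In the console pod's latency entry above, firstStartedPulling and lastFinishedPulling are both the zero time ("0001-01-01 00:00:00 +0000 UTC"), presumably because the image was already present and no pull happened, so podStartSLOduration equals the full E2E duration. A sketch of that zero-value handling:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Values from the console pod entry above: both pull timestamps are
	// the zero time, i.e. no image pull was observed for this start.
	var firstStartedPulling, lastFinishedPulling time.Time
	e2e := 2062456099 * time.Nanosecond // podStartE2EDuration="2.062456099s"

	pullWindow := time.Duration(0)
	if !firstStartedPulling.IsZero() {
		pullWindow = lastFinishedPulling.Sub(firstStartedPulling)
	}
	fmt.Println("podStartSLOduration:", e2e-pullWindow) // equals E2E: 2.062456099s
}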
watchObservedRunningTime="2025-10-03 13:04:23.099412535 +0000 UTC m=+859.309261601" Oct 03 13:04:25 crc kubenswrapper[4868]: I1003 13:04:25.075359 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" event={"ID":"6f27da3a-f372-4a3f-a88d-a99db0ade467","Type":"ContainerStarted","Data":"e0ace6687ae0c0741e64db6be0158dcc3204f90ccfa7aa0eb38ac43e2d144b38"} Oct 03 13:04:25 crc kubenswrapper[4868]: I1003 13:04:25.079752 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-nznm4" event={"ID":"a3945860-da06-49b9-b1fb-824d976dbcb5","Type":"ContainerStarted","Data":"167c795f5465fafd929632a554f7776f0a34d507d69e429e6d837b60019c52ba"} Oct 03 13:04:25 crc kubenswrapper[4868]: I1003 13:04:25.079808 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:25 crc kubenswrapper[4868]: I1003 13:04:25.101234 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-fdff9cb8d-vlb8q" podStartSLOduration=1.197566692 podStartE2EDuration="7.101214381s" podCreationTimestamp="2025-10-03 13:04:18 +0000 UTC" firstStartedPulling="2025-10-03 13:04:18.80061152 +0000 UTC m=+855.010460586" lastFinishedPulling="2025-10-03 13:04:24.704259209 +0000 UTC m=+860.914108275" observedRunningTime="2025-10-03 13:04:25.098440817 +0000 UTC m=+861.308289903" watchObservedRunningTime="2025-10-03 13:04:25.101214381 +0000 UTC m=+861.311063457" Oct 03 13:04:25 crc kubenswrapper[4868]: I1003 13:04:25.115731 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-nznm4" podStartSLOduration=1.68949134 podStartE2EDuration="7.1157131s" podCreationTimestamp="2025-10-03 13:04:18 +0000 UTC" firstStartedPulling="2025-10-03 13:04:18.455725034 +0000 UTC m=+854.665574100" lastFinishedPulling="2025-10-03 13:04:23.881946794 +0000 UTC m=+860.091795860" observedRunningTime="2025-10-03 13:04:25.112973916 +0000 UTC m=+861.322822982" watchObservedRunningTime="2025-10-03 13:04:25.1157131 +0000 UTC m=+861.325562166" Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.856513 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.857991 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.889536 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.967167 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vz9zx\" (UniqueName: \"kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.967267 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:26 crc kubenswrapper[4868]: I1003 13:04:26.967385 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.068734 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vz9zx\" (UniqueName: \"kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.068861 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.068888 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.069607 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.069753 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.089811 4868 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vz9zx\" (UniqueName: \"kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx\") pod \"redhat-marketplace-qnbfb\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.182014 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:27 crc kubenswrapper[4868]: I1003 13:04:27.607670 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:27 crc kubenswrapper[4868]: W1003 13:04:27.616127 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7b3086f3_95a0_4114_9a41_c2a7bac113c6.slice/crio-b1bdc61579c2820f6bb1d4a7972a3da2f9b8695feddbb61e1b88dc44aa9af7e9 WatchSource:0}: Error finding container b1bdc61579c2820f6bb1d4a7972a3da2f9b8695feddbb61e1b88dc44aa9af7e9: Status 404 returned error can't find the container with id b1bdc61579c2820f6bb1d4a7972a3da2f9b8695feddbb61e1b88dc44aa9af7e9 Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.096713 4868 generic.go:334] "Generic (PLEG): container finished" podID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerID="a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06" exitCode=0 Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.096774 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerDied","Data":"a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06"} Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.096808 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerStarted","Data":"b1bdc61579c2820f6bb1d4a7972a3da2f9b8695feddbb61e1b88dc44aa9af7e9"} Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.756177 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.756579 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:28 crc kubenswrapper[4868]: I1003 13:04:28.760950 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:29 crc kubenswrapper[4868]: I1003 13:04:29.103671 4868 generic.go:334] "Generic (PLEG): container finished" podID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerID="8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d" exitCode=0 Oct 03 13:04:29 crc kubenswrapper[4868]: I1003 13:04:29.103780 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerDied","Data":"8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d"} Oct 03 13:04:29 crc kubenswrapper[4868]: I1003 13:04:29.111140 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-67c7449f96-8675g" Oct 03 13:04:29 crc kubenswrapper[4868]: I1003 13:04:29.172467 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-console/console-f9d7485db-dq7lc"] Oct 03 13:04:30 crc kubenswrapper[4868]: I1003 13:04:30.111334 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerStarted","Data":"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005"} Oct 03 13:04:30 crc kubenswrapper[4868]: I1003 13:04:30.128228 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-qnbfb" podStartSLOduration=2.537551034 podStartE2EDuration="4.128204007s" podCreationTimestamp="2025-10-03 13:04:26 +0000 UTC" firstStartedPulling="2025-10-03 13:04:28.098537194 +0000 UTC m=+864.308386250" lastFinishedPulling="2025-10-03 13:04:29.689190157 +0000 UTC m=+865.899039223" observedRunningTime="2025-10-03 13:04:30.127603732 +0000 UTC m=+866.337452798" watchObservedRunningTime="2025-10-03 13:04:30.128204007 +0000 UTC m=+866.338053083" Oct 03 13:04:33 crc kubenswrapper[4868]: I1003 13:04:33.435872 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-nznm4" Oct 03 13:04:37 crc kubenswrapper[4868]: I1003 13:04:37.182970 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:37 crc kubenswrapper[4868]: I1003 13:04:37.183404 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:37 crc kubenswrapper[4868]: I1003 13:04:37.222636 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:38 crc kubenswrapper[4868]: I1003 13:04:38.209560 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:38 crc kubenswrapper[4868]: I1003 13:04:38.255978 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:38 crc kubenswrapper[4868]: I1003 13:04:38.379241 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-6cdbc54649-ksvpj" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.174217 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-qnbfb" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="registry-server" containerID="cri-o://b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005" gracePeriod=2 Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.234201 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m4wqp"] Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.235385 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.243506 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4wqp"] Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.359017 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-utilities\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.359829 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g82h\" (UniqueName: \"kubernetes.io/projected/e8238c1e-4090-4e0b-b833-621b910b8879-kube-api-access-6g82h\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.359928 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-catalog-content\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.462106 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-utilities\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.462182 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g82h\" (UniqueName: \"kubernetes.io/projected/e8238c1e-4090-4e0b-b833-621b910b8879-kube-api-access-6g82h\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.462211 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-catalog-content\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.462850 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-catalog-content\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.463235 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e8238c1e-4090-4e0b-b833-621b910b8879-utilities\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.487392 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-6g82h\" (UniqueName: \"kubernetes.io/projected/e8238c1e-4090-4e0b-b833-621b910b8879-kube-api-access-6g82h\") pod \"certified-operators-m4wqp\" (UID: \"e8238c1e-4090-4e0b-b833-621b910b8879\") " pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.563174 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.604762 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.767977 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities\") pod \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.768030 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vz9zx\" (UniqueName: \"kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx\") pod \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.768085 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content\") pod \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\" (UID: \"7b3086f3-95a0-4114-9a41-c2a7bac113c6\") " Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.768878 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities" (OuterVolumeSpecName: "utilities") pod "7b3086f3-95a0-4114-9a41-c2a7bac113c6" (UID: "7b3086f3-95a0-4114-9a41-c2a7bac113c6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.772594 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx" (OuterVolumeSpecName: "kube-api-access-vz9zx") pod "7b3086f3-95a0-4114-9a41-c2a7bac113c6" (UID: "7b3086f3-95a0-4114-9a41-c2a7bac113c6"). InnerVolumeSpecName "kube-api-access-vz9zx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.785330 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b3086f3-95a0-4114-9a41-c2a7bac113c6" (UID: "7b3086f3-95a0-4114-9a41-c2a7bac113c6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.869249 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.869285 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vz9zx\" (UniqueName: \"kubernetes.io/projected/7b3086f3-95a0-4114-9a41-c2a7bac113c6-kube-api-access-vz9zx\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:40 crc kubenswrapper[4868]: I1003 13:04:40.869294 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b3086f3-95a0-4114-9a41-c2a7bac113c6-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.004296 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4wqp"] Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.186313 4868 generic.go:334] "Generic (PLEG): container finished" podID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerID="b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005" exitCode=0 Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.186386 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-qnbfb" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.186388 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerDied","Data":"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005"} Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.186777 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-qnbfb" event={"ID":"7b3086f3-95a0-4114-9a41-c2a7bac113c6","Type":"ContainerDied","Data":"b1bdc61579c2820f6bb1d4a7972a3da2f9b8695feddbb61e1b88dc44aa9af7e9"} Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.186803 4868 scope.go:117] "RemoveContainer" containerID="b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.189393 4868 generic.go:334] "Generic (PLEG): container finished" podID="e8238c1e-4090-4e0b-b833-621b910b8879" containerID="87cb184f21ba844aad434cb29188bba76944d7eae091bbcd0265ba900f964af1" exitCode=0 Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.189424 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4wqp" event={"ID":"e8238c1e-4090-4e0b-b833-621b910b8879","Type":"ContainerDied","Data":"87cb184f21ba844aad434cb29188bba76944d7eae091bbcd0265ba900f964af1"} Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.189442 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4wqp" event={"ID":"e8238c1e-4090-4e0b-b833-621b910b8879","Type":"ContainerStarted","Data":"fd24168a950209ac44a429cb3338212b982e5be6f10db4005db09b2738b99f02"} Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.219277 4868 scope.go:117] "RemoveContainer" containerID="8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.230263 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.237406 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-qnbfb"] Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.249548 4868 scope.go:117] "RemoveContainer" containerID="a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.266040 4868 scope.go:117] "RemoveContainer" containerID="b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005" Oct 03 13:04:41 crc kubenswrapper[4868]: E1003 13:04:41.266525 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005\": container with ID starting with b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005 not found: ID does not exist" containerID="b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.266561 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005"} err="failed to get container status \"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005\": rpc error: code = NotFound desc = could not find container \"b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005\": container with ID starting with b8aa00df08b02c8d212ce10ee4ecc3436b8952c10813936da57e92a3c3316005 not found: ID does not exist" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.266584 4868 scope.go:117] "RemoveContainer" containerID="8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d" Oct 03 13:04:41 crc kubenswrapper[4868]: E1003 13:04:41.267011 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d\": container with ID starting with 8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d not found: ID does not exist" containerID="8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.267155 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d"} err="failed to get container status \"8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d\": rpc error: code = NotFound desc = could not find container \"8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d\": container with ID starting with 8e36897f231c22e9c276128face1c3c2d202f8ce76c0982ba4bb3c196a01fa2d not found: ID does not exist" Oct 03 13:04:41 crc kubenswrapper[4868]: I1003 13:04:41.267170 4868 scope.go:117] "RemoveContainer" containerID="a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06" Oct 03 13:04:41 crc kubenswrapper[4868]: E1003 13:04:41.267439 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06\": container with ID starting with a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06 not found: ID does not exist" containerID="a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06" Oct 03 13:04:41 crc 
kubenswrapper[4868]: I1003 13:04:41.267469 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06"} err="failed to get container status \"a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06\": rpc error: code = NotFound desc = could not find container \"a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06\": container with ID starting with a3d43cd498746cb7cd72f04a61ead06017a697962481df32dedfe9160c7dcb06 not found: ID does not exist" Oct 03 13:04:42 crc kubenswrapper[4868]: I1003 13:04:42.551140 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" path="/var/lib/kubelet/pods/7b3086f3-95a0-4114-9a41-c2a7bac113c6/volumes" Oct 03 13:04:45 crc kubenswrapper[4868]: I1003 13:04:45.216211 4868 generic.go:334] "Generic (PLEG): container finished" podID="e8238c1e-4090-4e0b-b833-621b910b8879" containerID="d83bfb379ccb7dbd7726983c2bdd7cd71071565e3638d4fd616f30bf97c75f7f" exitCode=0 Oct 03 13:04:45 crc kubenswrapper[4868]: I1003 13:04:45.216271 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4wqp" event={"ID":"e8238c1e-4090-4e0b-b833-621b910b8879","Type":"ContainerDied","Data":"d83bfb379ccb7dbd7726983c2bdd7cd71071565e3638d4fd616f30bf97c75f7f"} Oct 03 13:04:46 crc kubenswrapper[4868]: I1003 13:04:46.228752 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m4wqp" event={"ID":"e8238c1e-4090-4e0b-b833-621b910b8879","Type":"ContainerStarted","Data":"534c1cdecf41da078be729cd1b3e46c842085a315b2280b591ae990a06450917"} Oct 03 13:04:46 crc kubenswrapper[4868]: I1003 13:04:46.259213 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m4wqp" podStartSLOduration=1.7585757210000001 podStartE2EDuration="6.259183656s" podCreationTimestamp="2025-10-03 13:04:40 +0000 UTC" firstStartedPulling="2025-10-03 13:04:41.19043248 +0000 UTC m=+877.400281546" lastFinishedPulling="2025-10-03 13:04:45.691040415 +0000 UTC m=+881.900889481" observedRunningTime="2025-10-03 13:04:46.253381221 +0000 UTC m=+882.463230287" watchObservedRunningTime="2025-10-03 13:04:46.259183656 +0000 UTC m=+882.469032722" Oct 03 13:04:50 crc kubenswrapper[4868]: I1003 13:04:50.563921 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:50 crc kubenswrapper[4868]: I1003 13:04:50.564584 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:50 crc kubenswrapper[4868]: I1003 13:04:50.606077 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.255706 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt"] Oct 03 13:04:51 crc kubenswrapper[4868]: E1003 13:04:51.256074 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="extract-content" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.256131 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="extract-content" Oct 03 13:04:51 
crc kubenswrapper[4868]: E1003 13:04:51.256146 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="registry-server" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.256152 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="registry-server" Oct 03 13:04:51 crc kubenswrapper[4868]: E1003 13:04:51.256166 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="extract-utilities" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.256173 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="extract-utilities" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.256328 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b3086f3-95a0-4114-9a41-c2a7bac113c6" containerName="registry-server" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.257525 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.259522 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.263777 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt"] Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.301615 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-m4wqp" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.365202 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.365305 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwf2k\" (UniqueName: \"kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.365386 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.466673 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle\") pod 
\"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.466745 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.466798 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwf2k\" (UniqueName: \"kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.467410 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.467682 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.488390 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwf2k\" (UniqueName: \"kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k\") pod \"8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.588765 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:51 crc kubenswrapper[4868]: I1003 13:04:51.786936 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt"] Oct 03 13:04:52 crc kubenswrapper[4868]: I1003 13:04:52.265907 4868 generic.go:334] "Generic (PLEG): container finished" podID="09388f41-23a9-4759-b318-4694ef98e81a" containerID="69206a3008a84f445de126acbb53eb75144f06d089f147c449616d31939bf8d2" exitCode=0 Oct 03 13:04:52 crc kubenswrapper[4868]: I1003 13:04:52.267198 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" event={"ID":"09388f41-23a9-4759-b318-4694ef98e81a","Type":"ContainerDied","Data":"69206a3008a84f445de126acbb53eb75144f06d089f147c449616d31939bf8d2"} Oct 03 13:04:52 crc kubenswrapper[4868]: I1003 13:04:52.267251 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" event={"ID":"09388f41-23a9-4759-b318-4694ef98e81a","Type":"ContainerStarted","Data":"602010b02d9f67e2869ea4493db64d0626dbde8e5332d7afe6c70d09ffb54212"} Oct 03 13:04:54 crc kubenswrapper[4868]: I1003 13:04:54.088696 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m4wqp"] Oct 03 13:04:54 crc kubenswrapper[4868]: I1003 13:04:54.210544 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-dq7lc" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" containerID="cri-o://bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a" gracePeriod=15 Oct 03 13:04:54 crc kubenswrapper[4868]: I1003 13:04:54.455952 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 13:04:54 crc kubenswrapper[4868]: I1003 13:04:54.456845 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bx6ln" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="registry-server" containerID="cri-o://7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4" gracePeriod=2 Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.179988 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dq7lc_a4cfd01a-748d-42ec-9d69-bdf306168942/console/0.log" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.180394 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.266570 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.292251 4868 generic.go:334] "Generic (PLEG): container finished" podID="09388f41-23a9-4759-b318-4694ef98e81a" containerID="e43bc19239d340e34cc3454667c72750c843d74efb30e86365cd1240b1fffc8d" exitCode=0 Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.292330 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" event={"ID":"09388f41-23a9-4759-b318-4694ef98e81a","Type":"ContainerDied","Data":"e43bc19239d340e34cc3454667c72750c843d74efb30e86365cd1240b1fffc8d"} Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303018 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-dq7lc_a4cfd01a-748d-42ec-9d69-bdf306168942/console/0.log" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303092 4868 generic.go:334] "Generic (PLEG): container finished" podID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerID="bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a" exitCode=2 Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303165 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dq7lc" event={"ID":"a4cfd01a-748d-42ec-9d69-bdf306168942","Type":"ContainerDied","Data":"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a"} Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303186 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-dq7lc" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303215 4868 scope.go:117] "RemoveContainer" containerID="bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.303198 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-dq7lc" event={"ID":"a4cfd01a-748d-42ec-9d69-bdf306168942","Type":"ContainerDied","Data":"f609dc3aee397a55645af6e9ebbad797b3acb487897a1a5bc4bc2c4d27fccff2"} Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.308721 4868 generic.go:334] "Generic (PLEG): container finished" podID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerID="7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4" exitCode=0 Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.308770 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerDied","Data":"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4"} Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.308800 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bx6ln" event={"ID":"055b5bf7-16bd-4acb-8d96-a5678f86f0c2","Type":"ContainerDied","Data":"534906dfc9579a9527cdeef2e93dea57974c14eb2fc44152b7d0e73b3554e77e"} Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.308906 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bx6ln" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.324924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.324992 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.325019 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.325136 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.325202 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.325224 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.325243 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hlf5l\" (UniqueName: \"kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l\") pod \"a4cfd01a-748d-42ec-9d69-bdf306168942\" (UID: \"a4cfd01a-748d-42ec-9d69-bdf306168942\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.326952 4868 scope.go:117] "RemoveContainer" containerID="bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.327079 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.327149 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config" (OuterVolumeSpecName: "console-config") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.327172 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.328202 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca" (OuterVolumeSpecName: "service-ca") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: E1003 13:04:55.328318 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a\": container with ID starting with bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a not found: ID does not exist" containerID="bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.328388 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a"} err="failed to get container status \"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a\": rpc error: code = NotFound desc = could not find container \"bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a\": container with ID starting with bccc65c724a949729c641d2c961fa6f09426931cc34faabc727e21c911b4286a not found: ID does not exist" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.328420 4868 scope.go:117] "RemoveContainer" containerID="7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.334813 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l" (OuterVolumeSpecName: "kube-api-access-hlf5l") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "kube-api-access-hlf5l". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.334857 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "console-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.335361 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "a4cfd01a-748d-42ec-9d69-bdf306168942" (UID: "a4cfd01a-748d-42ec-9d69-bdf306168942"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.371962 4868 scope.go:117] "RemoveContainer" containerID="49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.400949 4868 scope.go:117] "RemoveContainer" containerID="20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426091 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities\") pod \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426175 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content\") pod \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426221 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fhpz8\" (UniqueName: \"kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8\") pod \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\" (UID: \"055b5bf7-16bd-4acb-8d96-a5678f86f0c2\") " Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426522 4868 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-oauth-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426544 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hlf5l\" (UniqueName: \"kubernetes.io/projected/a4cfd01a-748d-42ec-9d69-bdf306168942-kube-api-access-hlf5l\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426555 4868 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-console-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426564 4868 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426572 4868 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426581 4868 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/a4cfd01a-748d-42ec-9d69-bdf306168942-console-serving-cert\") 
on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.426589 4868 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/a4cfd01a-748d-42ec-9d69-bdf306168942-service-ca\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.427501 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities" (OuterVolumeSpecName: "utilities") pod "055b5bf7-16bd-4acb-8d96-a5678f86f0c2" (UID: "055b5bf7-16bd-4acb-8d96-a5678f86f0c2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.427633 4868 scope.go:117] "RemoveContainer" containerID="7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4" Oct 03 13:04:55 crc kubenswrapper[4868]: E1003 13:04:55.427989 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4\": container with ID starting with 7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4 not found: ID does not exist" containerID="7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.428035 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4"} err="failed to get container status \"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4\": rpc error: code = NotFound desc = could not find container \"7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4\": container with ID starting with 7b6581358d89cb35a3982f9f7bb8614e696f36698c8594f67398d2a6a718b6b4 not found: ID does not exist" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.428079 4868 scope.go:117] "RemoveContainer" containerID="49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306" Oct 03 13:04:55 crc kubenswrapper[4868]: E1003 13:04:55.428277 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306\": container with ID starting with 49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306 not found: ID does not exist" containerID="49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.428300 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306"} err="failed to get container status \"49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306\": rpc error: code = NotFound desc = could not find container \"49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306\": container with ID starting with 49c74734302f95315e8e19b105821d7aa948a5810798d0329224b4b008f60306 not found: ID does not exist" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.428313 4868 scope.go:117] "RemoveContainer" containerID="20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542" Oct 03 13:04:55 crc kubenswrapper[4868]: E1003 13:04:55.428491 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542\": container with ID starting with 20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542 not found: ID does not exist" containerID="20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.428524 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542"} err="failed to get container status \"20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542\": rpc error: code = NotFound desc = could not find container \"20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542\": container with ID starting with 20193c091809213d6a26431b5212716a99410dd4cec9239c41a566f56005c542 not found: ID does not exist" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.432142 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8" (OuterVolumeSpecName: "kube-api-access-fhpz8") pod "055b5bf7-16bd-4acb-8d96-a5678f86f0c2" (UID: "055b5bf7-16bd-4acb-8d96-a5678f86f0c2"). InnerVolumeSpecName "kube-api-access-fhpz8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.472005 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "055b5bf7-16bd-4acb-8d96-a5678f86f0c2" (UID: "055b5bf7-16bd-4acb-8d96-a5678f86f0c2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.528514 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.528576 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.528595 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fhpz8\" (UniqueName: \"kubernetes.io/projected/055b5bf7-16bd-4acb-8d96-a5678f86f0c2-kube-api-access-fhpz8\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.698330 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.702697 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bx6ln"] Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.706206 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-dq7lc"] Oct 03 13:04:55 crc kubenswrapper[4868]: I1003 13:04:55.708918 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-dq7lc"] Oct 03 13:04:56 crc kubenswrapper[4868]: I1003 13:04:56.318070 4868 generic.go:334] "Generic (PLEG): container finished" podID="09388f41-23a9-4759-b318-4694ef98e81a" containerID="53d971ce326749292c1e621fb427561d240c264bd0ab2465ce7c6e620e89e08b" exitCode=0 Oct 03 13:04:56 crc 
kubenswrapper[4868]: I1003 13:04:56.318783 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" event={"ID":"09388f41-23a9-4759-b318-4694ef98e81a","Type":"ContainerDied","Data":"53d971ce326749292c1e621fb427561d240c264bd0ab2465ce7c6e620e89e08b"} Oct 03 13:04:56 crc kubenswrapper[4868]: I1003 13:04:56.551544 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" path="/var/lib/kubelet/pods/055b5bf7-16bd-4acb-8d96-a5678f86f0c2/volumes" Oct 03 13:04:56 crc kubenswrapper[4868]: I1003 13:04:56.552542 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" path="/var/lib/kubelet/pods/a4cfd01a-748d-42ec-9d69-bdf306168942/volumes" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.549816 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.655005 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle\") pod \"09388f41-23a9-4759-b318-4694ef98e81a\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.655171 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwf2k\" (UniqueName: \"kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k\") pod \"09388f41-23a9-4759-b318-4694ef98e81a\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.655190 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util\") pod \"09388f41-23a9-4759-b318-4694ef98e81a\" (UID: \"09388f41-23a9-4759-b318-4694ef98e81a\") " Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.656664 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle" (OuterVolumeSpecName: "bundle") pod "09388f41-23a9-4759-b318-4694ef98e81a" (UID: "09388f41-23a9-4759-b318-4694ef98e81a"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.660348 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k" (OuterVolumeSpecName: "kube-api-access-vwf2k") pod "09388f41-23a9-4759-b318-4694ef98e81a" (UID: "09388f41-23a9-4759-b318-4694ef98e81a"). InnerVolumeSpecName "kube-api-access-vwf2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.696961 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util" (OuterVolumeSpecName: "util") pod "09388f41-23a9-4759-b318-4694ef98e81a" (UID: "09388f41-23a9-4759-b318-4694ef98e81a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.756553 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwf2k\" (UniqueName: \"kubernetes.io/projected/09388f41-23a9-4759-b318-4694ef98e81a-kube-api-access-vwf2k\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.756605 4868 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:57 crc kubenswrapper[4868]: I1003 13:04:57.756618 4868 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/09388f41-23a9-4759-b318-4694ef98e81a-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:04:58 crc kubenswrapper[4868]: I1003 13:04:58.328766 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" event={"ID":"09388f41-23a9-4759-b318-4694ef98e81a","Type":"ContainerDied","Data":"602010b02d9f67e2869ea4493db64d0626dbde8e5332d7afe6c70d09ffb54212"} Oct 03 13:04:58 crc kubenswrapper[4868]: I1003 13:04:58.328800 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt" Oct 03 13:04:58 crc kubenswrapper[4868]: I1003 13:04:58.328804 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="602010b02d9f67e2869ea4493db64d0626dbde8e5332d7afe6c70d09ffb54212" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.313274 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc"] Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314110 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314126 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314140 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="registry-server" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314147 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="registry-server" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314158 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="extract-content" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314182 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="extract-content" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314200 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="util" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314207 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="util" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314221 4868 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="pull" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314228 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="pull" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314237 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="extract" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314244 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="extract" Oct 03 13:05:09 crc kubenswrapper[4868]: E1003 13:05:09.314254 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="extract-utilities" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314264 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="extract-utilities" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314435 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="055b5bf7-16bd-4acb-8d96-a5678f86f0c2" containerName="registry-server" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314450 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="09388f41-23a9-4759-b318-4694ef98e81a" containerName="extract" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.314526 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4cfd01a-748d-42ec-9d69-bdf306168942" containerName="console" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.315114 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.316840 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-4zrxk" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.317254 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.317498 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.318013 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.319248 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.326493 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc"] Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.414160 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-csrwp\" (UniqueName: \"kubernetes.io/projected/22faf835-b22c-40ca-b38b-d3749dd60a3c-kube-api-access-csrwp\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.414283 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-apiservice-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.414307 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-webhook-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.515699 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-apiservice-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.515742 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-webhook-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.515772 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-csrwp\" (UniqueName: \"kubernetes.io/projected/22faf835-b22c-40ca-b38b-d3749dd60a3c-kube-api-access-csrwp\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.522168 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-apiservice-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.522596 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/22faf835-b22c-40ca-b38b-d3749dd60a3c-webhook-cert\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.534527 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-csrwp\" (UniqueName: \"kubernetes.io/projected/22faf835-b22c-40ca-b38b-d3749dd60a3c-kube-api-access-csrwp\") pod \"metallb-operator-controller-manager-6bc4b54f6f-wtfqc\" (UID: \"22faf835-b22c-40ca-b38b-d3749dd60a3c\") " pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.558028 4868 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv"] Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.558902 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.561370 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.561485 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.567099 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-xjphw" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.576414 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv"] Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.637443 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.717827 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-apiservice-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.718192 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hh6c7\" (UniqueName: \"kubernetes.io/projected/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-kube-api-access-hh6c7\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.718231 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-webhook-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.819709 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-webhook-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.819814 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-apiservice-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.819861 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-hh6c7\" (UniqueName: \"kubernetes.io/projected/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-kube-api-access-hh6c7\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.832835 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-webhook-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.844896 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-apiservice-cert\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.845155 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hh6c7\" (UniqueName: \"kubernetes.io/projected/37f71aac-03c4-41ee-9f10-27cc8a5bcbfb-kube-api-access-hh6c7\") pod \"metallb-operator-webhook-server-f7bdd5677-d29dv\" (UID: \"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb\") " pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.898045 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc"] Oct 03 13:05:09 crc kubenswrapper[4868]: I1003 13:05:09.899946 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:09 crc kubenswrapper[4868]: W1003 13:05:09.904455 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22faf835_b22c_40ca_b38b_d3749dd60a3c.slice/crio-d0021d3cbe515f8b879ef1ab2170184041aedb3f6384de2877ba19e9d2235514 WatchSource:0}: Error finding container d0021d3cbe515f8b879ef1ab2170184041aedb3f6384de2877ba19e9d2235514: Status 404 returned error can't find the container with id d0021d3cbe515f8b879ef1ab2170184041aedb3f6384de2877ba19e9d2235514 Oct 03 13:05:10 crc kubenswrapper[4868]: I1003 13:05:10.233864 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv"] Oct 03 13:05:10 crc kubenswrapper[4868]: W1003 13:05:10.243643 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37f71aac_03c4_41ee_9f10_27cc8a5bcbfb.slice/crio-927178f15fafa1bcd80d9f12aeb6aa18784fc0ac6e51760b39ba17e322369469 WatchSource:0}: Error finding container 927178f15fafa1bcd80d9f12aeb6aa18784fc0ac6e51760b39ba17e322369469: Status 404 returned error can't find the container with id 927178f15fafa1bcd80d9f12aeb6aa18784fc0ac6e51760b39ba17e322369469 Oct 03 13:05:10 crc kubenswrapper[4868]: I1003 13:05:10.397719 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" event={"ID":"22faf835-b22c-40ca-b38b-d3749dd60a3c","Type":"ContainerStarted","Data":"d0021d3cbe515f8b879ef1ab2170184041aedb3f6384de2877ba19e9d2235514"} Oct 03 13:05:10 crc kubenswrapper[4868]: I1003 13:05:10.399596 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" event={"ID":"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb","Type":"ContainerStarted","Data":"927178f15fafa1bcd80d9f12aeb6aa18784fc0ac6e51760b39ba17e322369469"} Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.429148 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" event={"ID":"22faf835-b22c-40ca-b38b-d3749dd60a3c","Type":"ContainerStarted","Data":"9d2546da81ee8d92538b0b0ca425e679d522a92f1e59c875b27bd9aac5cb7d27"} Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.429692 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.431441 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" event={"ID":"37f71aac-03c4-41ee-9f10-27cc8a5bcbfb","Type":"ContainerStarted","Data":"291a140717182e4aa61ef324aa9a47e275ff8b76ddcbe36d26836c38dc573b0f"} Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.431603 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.450511 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" podStartSLOduration=1.749448608 podStartE2EDuration="6.450493206s" podCreationTimestamp="2025-10-03 13:05:09 +0000 UTC" firstStartedPulling="2025-10-03 13:05:09.915644184 +0000 UTC m=+906.125493250" 
lastFinishedPulling="2025-10-03 13:05:14.616688782 +0000 UTC m=+910.826537848" observedRunningTime="2025-10-03 13:05:15.446423657 +0000 UTC m=+911.656272733" watchObservedRunningTime="2025-10-03 13:05:15.450493206 +0000 UTC m=+911.660342282" Oct 03 13:05:15 crc kubenswrapper[4868]: I1003 13:05:15.467740 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" podStartSLOduration=2.096873571 podStartE2EDuration="6.467723958s" podCreationTimestamp="2025-10-03 13:05:09 +0000 UTC" firstStartedPulling="2025-10-03 13:05:10.24713769 +0000 UTC m=+906.456986746" lastFinishedPulling="2025-10-03 13:05:14.617988067 +0000 UTC m=+910.827837133" observedRunningTime="2025-10-03 13:05:15.465891539 +0000 UTC m=+911.675740605" watchObservedRunningTime="2025-10-03 13:05:15.467723958 +0000 UTC m=+911.677573024" Oct 03 13:05:29 crc kubenswrapper[4868]: I1003 13:05:29.905679 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-f7bdd5677-d29dv" Oct 03 13:05:32 crc kubenswrapper[4868]: I1003 13:05:32.145306 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:05:32 crc kubenswrapper[4868]: I1003 13:05:32.146156 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:05:49 crc kubenswrapper[4868]: I1003 13:05:49.640575 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-6bc4b54f6f-wtfqc" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.358817 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-gnnv9"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.361229 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.364186 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.364383 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.364805 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.364862 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-qr27q" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.365341 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.366283 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.418260 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.455830 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-cert\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.455897 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsdgv\" (UniqueName: \"kubernetes.io/projected/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-kube-api-access-nsdgv\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.455936 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-sockets\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.455960 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics-certs\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.455987 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jkm6h\" (UniqueName: \"kubernetes.io/projected/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-kube-api-access-jkm6h\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.456115 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-reloader\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.456156 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-startup\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 
13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.456188 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.456242 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-conf\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.458896 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-s7m4r"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.463249 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.468121 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.470710 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.471289 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-26ft5" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.471450 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.492637 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-68d546b9d8-v8khj"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.493686 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.499637 4868 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.512481 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-v8khj"] Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.557919 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.557990 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnhm4\" (UniqueName: \"kubernetes.io/projected/7210f599-a063-4c96-80ad-44928756136c-kube-api-access-hnhm4\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558022 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-cert\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558069 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pxldk\" (UniqueName: \"kubernetes.io/projected/8f841000-9d62-4031-ace5-fd99a8d1409a-kube-api-access-pxldk\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558094 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-cert\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558120 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-metrics-certs\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558142 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsdgv\" (UniqueName: \"kubernetes.io/projected/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-kube-api-access-nsdgv\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558174 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-sockets\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" 
Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558194 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics-certs\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558215 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jkm6h\" (UniqueName: \"kubernetes.io/projected/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-kube-api-access-jkm6h\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558237 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-reloader\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558258 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-startup\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558280 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558301 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7210f599-a063-4c96-80ad-44928756136c-metallb-excludel2\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558320 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558350 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-conf\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558687 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-sockets\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.558773 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-conf\") pod \"frr-k8s-gnnv9\" 
(UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.559118 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.559220 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-reloader\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.560459 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-frr-startup\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.563674 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-cert\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.594539 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-metrics-certs\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.605325 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jkm6h\" (UniqueName: \"kubernetes.io/projected/7d68d329-fabf-44a2-a134-c5ccfe0ddf96-kube-api-access-jkm6h\") pod \"frr-k8s-gnnv9\" (UID: \"7d68d329-fabf-44a2-a134-c5ccfe0ddf96\") " pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.640199 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsdgv\" (UniqueName: \"kubernetes.io/projected/1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a-kube-api-access-nsdgv\") pod \"frr-k8s-webhook-server-64bf5d555-vhc7k\" (UID: \"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a\") " pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.660774 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661497 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnhm4\" (UniqueName: \"kubernetes.io/projected/7210f599-a063-4c96-80ad-44928756136c-kube-api-access-hnhm4\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661614 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pxldk\" 
(UniqueName: \"kubernetes.io/projected/8f841000-9d62-4031-ace5-fd99a8d1409a-kube-api-access-pxldk\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661688 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-cert\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661763 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-metrics-certs\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661883 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.661961 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7210f599-a063-4c96-80ad-44928756136c-metallb-excludel2\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: E1003 13:05:50.662771 4868 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 13:05:50 crc kubenswrapper[4868]: E1003 13:05:50.662879 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist podName:7210f599-a063-4c96-80ad-44928756136c nodeName:}" failed. No retries permitted until 2025-10-03 13:05:51.162860733 +0000 UTC m=+947.372709799 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist") pod "speaker-s7m4r" (UID: "7210f599-a063-4c96-80ad-44928756136c") : secret "metallb-memberlist" not found Oct 03 13:05:50 crc kubenswrapper[4868]: E1003 13:05:50.665233 4868 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Oct 03 13:05:50 crc kubenswrapper[4868]: E1003 13:05:50.665346 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs podName:8f841000-9d62-4031-ace5-fd99a8d1409a nodeName:}" failed. No retries permitted until 2025-10-03 13:05:51.165319849 +0000 UTC m=+947.375168915 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs") pod "controller-68d546b9d8-v8khj" (UID: "8f841000-9d62-4031-ace5-fd99a8d1409a") : secret "controller-certs-secret" not found Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.666635 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/7210f599-a063-4c96-80ad-44928756136c-metallb-excludel2\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.679703 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-metrics-certs\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.689345 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.690659 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-cert\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.700696 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.701551 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pxldk\" (UniqueName: \"kubernetes.io/projected/8f841000-9d62-4031-ace5-fd99a8d1409a-kube-api-access-pxldk\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:50 crc kubenswrapper[4868]: I1003 13:05:50.707549 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnhm4\" (UniqueName: \"kubernetes.io/projected/7210f599-a063-4c96-80ad-44928756136c-kube-api-access-hnhm4\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.148203 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k"] Oct 03 13:05:51 crc kubenswrapper[4868]: W1003 13:05:51.155573 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1c72f7f5_4ce6_4b3a_8d99_592c1c809f9a.slice/crio-1877a70f83d4a751055c89b674093f719ba2799f71adf267678e03186be28dd2 WatchSource:0}: Error finding container 1877a70f83d4a751055c89b674093f719ba2799f71adf267678e03186be28dd2: Status 404 returned error can't find the container with id 1877a70f83d4a751055c89b674093f719ba2799f71adf267678e03186be28dd2 Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.167492 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " 
pod="metallb-system/speaker-s7m4r" Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.167638 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:51 crc kubenswrapper[4868]: E1003 13:05:51.167691 4868 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Oct 03 13:05:51 crc kubenswrapper[4868]: E1003 13:05:51.167769 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist podName:7210f599-a063-4c96-80ad-44928756136c nodeName:}" failed. No retries permitted until 2025-10-03 13:05:52.167749188 +0000 UTC m=+948.377598244 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist") pod "speaker-s7m4r" (UID: "7210f599-a063-4c96-80ad-44928756136c") : secret "metallb-memberlist" not found Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.171669 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/8f841000-9d62-4031-ace5-fd99a8d1409a-metrics-certs\") pod \"controller-68d546b9d8-v8khj\" (UID: \"8f841000-9d62-4031-ace5-fd99a8d1409a\") " pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.414166 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.618879 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-68d546b9d8-v8khj"] Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.656618 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" event={"ID":"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a","Type":"ContainerStarted","Data":"1877a70f83d4a751055c89b674093f719ba2799f71adf267678e03186be28dd2"} Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.657616 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-v8khj" event={"ID":"8f841000-9d62-4031-ace5-fd99a8d1409a","Type":"ContainerStarted","Data":"d64f5008ae488f2f4e84543960e77fd5453742e8bf3d7884dac995acfd41b713"} Oct 03 13:05:51 crc kubenswrapper[4868]: I1003 13:05:51.658415 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"55c30a63d0626d1df0a1347498a7ea65161fc91d13d5784aa47e89eea2dc742a"} Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.181039 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist\") pod \"speaker-s7m4r\" (UID: \"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.186988 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/7210f599-a063-4c96-80ad-44928756136c-memberlist\") pod \"speaker-s7m4r\" (UID: 
\"7210f599-a063-4c96-80ad-44928756136c\") " pod="metallb-system/speaker-s7m4r" Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.286034 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-s7m4r" Oct 03 13:05:52 crc kubenswrapper[4868]: W1003 13:05:52.306754 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7210f599_a063_4c96_80ad_44928756136c.slice/crio-affc78974cd3eb53915910e520236a9f8c1ce5ba5c1de286e5554aea6c56fcf4 WatchSource:0}: Error finding container affc78974cd3eb53915910e520236a9f8c1ce5ba5c1de286e5554aea6c56fcf4: Status 404 returned error can't find the container with id affc78974cd3eb53915910e520236a9f8c1ce5ba5c1de286e5554aea6c56fcf4 Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.692217 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-v8khj" event={"ID":"8f841000-9d62-4031-ace5-fd99a8d1409a","Type":"ContainerStarted","Data":"abed6f1a911e467a0f5a8a11e417a2f489b59859ab9c8b59771894d0da0a50b4"} Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.692282 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-68d546b9d8-v8khj" event={"ID":"8f841000-9d62-4031-ace5-fd99a8d1409a","Type":"ContainerStarted","Data":"67b7f589a41add3149dad65e9eb5e1ded3a57d70b92fb87945667dd0d4b67d0c"} Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.692337 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.699040 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-s7m4r" event={"ID":"7210f599-a063-4c96-80ad-44928756136c","Type":"ContainerStarted","Data":"449ebe72ed2cc4ba5e488c8053f9c71d3df1422d38413231380f4c2dc27b8b3d"} Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.699147 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-s7m4r" event={"ID":"7210f599-a063-4c96-80ad-44928756136c","Type":"ContainerStarted","Data":"affc78974cd3eb53915910e520236a9f8c1ce5ba5c1de286e5554aea6c56fcf4"} Oct 03 13:05:52 crc kubenswrapper[4868]: I1003 13:05:52.729490 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-68d546b9d8-v8khj" podStartSLOduration=2.729469966 podStartE2EDuration="2.729469966s" podCreationTimestamp="2025-10-03 13:05:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:05:52.728668704 +0000 UTC m=+948.938517770" watchObservedRunningTime="2025-10-03 13:05:52.729469966 +0000 UTC m=+948.939319052" Oct 03 13:05:53 crc kubenswrapper[4868]: I1003 13:05:53.713413 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-s7m4r" event={"ID":"7210f599-a063-4c96-80ad-44928756136c","Type":"ContainerStarted","Data":"6756d83be3eede2b271ae047e42f8319ee801f354920b54b39432d377201262e"} Oct 03 13:05:54 crc kubenswrapper[4868]: I1003 13:05:54.579332 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-s7m4r" podStartSLOduration=4.579312878 podStartE2EDuration="4.579312878s" podCreationTimestamp="2025-10-03 13:05:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:05:53.731821788 +0000 UTC 
m=+949.941670864" watchObservedRunningTime="2025-10-03 13:05:54.579312878 +0000 UTC m=+950.789161954" Oct 03 13:05:54 crc kubenswrapper[4868]: I1003 13:05:54.720906 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-s7m4r" Oct 03 13:06:00 crc kubenswrapper[4868]: I1003 13:06:00.761896 4868 generic.go:334] "Generic (PLEG): container finished" podID="7d68d329-fabf-44a2-a134-c5ccfe0ddf96" containerID="b66c45516b5cad767e79d7dd7cabfbfbd8a91b858ae6a13c1c74e4b28897de85" exitCode=0 Oct 03 13:06:00 crc kubenswrapper[4868]: I1003 13:06:00.761982 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerDied","Data":"b66c45516b5cad767e79d7dd7cabfbfbd8a91b858ae6a13c1c74e4b28897de85"} Oct 03 13:06:00 crc kubenswrapper[4868]: I1003 13:06:00.764022 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" event={"ID":"1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a","Type":"ContainerStarted","Data":"5d9085badcef5dd73e746385ddb65cd9a6fd649b0bc38d8a8721d72802296302"} Oct 03 13:06:01 crc kubenswrapper[4868]: I1003 13:06:01.420085 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-68d546b9d8-v8khj" Oct 03 13:06:01 crc kubenswrapper[4868]: I1003 13:06:01.769121 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:06:01 crc kubenswrapper[4868]: I1003 13:06:01.815274 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" podStartSLOduration=2.714094683 podStartE2EDuration="11.815253274s" podCreationTimestamp="2025-10-03 13:05:50 +0000 UTC" firstStartedPulling="2025-10-03 13:05:51.158695685 +0000 UTC m=+947.368544741" lastFinishedPulling="2025-10-03 13:06:00.259854266 +0000 UTC m=+956.469703332" observedRunningTime="2025-10-03 13:06:01.814834153 +0000 UTC m=+958.024683239" watchObservedRunningTime="2025-10-03 13:06:01.815253274 +0000 UTC m=+958.025102350" Oct 03 13:06:02 crc kubenswrapper[4868]: I1003 13:06:02.146043 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:06:02 crc kubenswrapper[4868]: I1003 13:06:02.146370 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:06:02 crc kubenswrapper[4868]: I1003 13:06:02.290900 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-s7m4r" Oct 03 13:06:02 crc kubenswrapper[4868]: I1003 13:06:02.777313 4868 generic.go:334] "Generic (PLEG): container finished" podID="7d68d329-fabf-44a2-a134-c5ccfe0ddf96" containerID="cf0dcda08baf5791dcf56c23a479186ce8128672e52c5108374188aede3ea702" exitCode=0 Oct 03 13:06:02 crc kubenswrapper[4868]: I1003 13:06:02.777474 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" 
event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerDied","Data":"cf0dcda08baf5791dcf56c23a479186ce8128672e52c5108374188aede3ea702"} Oct 03 13:06:03 crc kubenswrapper[4868]: I1003 13:06:03.785809 4868 generic.go:334] "Generic (PLEG): container finished" podID="7d68d329-fabf-44a2-a134-c5ccfe0ddf96" containerID="8e4b15f3eed49188cfc0b8faf4811ddac57f0b7622dbf83072e853ec316ca88e" exitCode=0 Oct 03 13:06:03 crc kubenswrapper[4868]: I1003 13:06:03.785870 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerDied","Data":"8e4b15f3eed49188cfc0b8faf4811ddac57f0b7622dbf83072e853ec316ca88e"} Oct 03 13:06:04 crc kubenswrapper[4868]: I1003 13:06:04.798953 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"e94cec2b46ee75fb2a92e40cb23f0a0912c15e3216c822881d71d8921fe95028"} Oct 03 13:06:04 crc kubenswrapper[4868]: I1003 13:06:04.799342 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"8bdf554d7e88db1d97b4a1bb6e6f3551fb51ed9a42932cf7a5d320a4fba260a8"} Oct 03 13:06:04 crc kubenswrapper[4868]: I1003 13:06:04.799361 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"95800f5cf05129e8b36e74c86959642531b2709644967d6a21fb39d62e88a198"} Oct 03 13:06:04 crc kubenswrapper[4868]: I1003 13:06:04.799376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"9582e84abc551dc7ac692845c2b2042ce573961dd434a88b47344827f5f799e4"} Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.237521 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.238809 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.246137 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.246647 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-vkmsb" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.246905 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.268016 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.372450 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrxtv\" (UniqueName: \"kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv\") pod \"openstack-operator-index-qjcbk\" (UID: \"efd6a0ef-6a5b-4308-933a-4751c3df6381\") " pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.474197 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrxtv\" (UniqueName: \"kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv\") pod \"openstack-operator-index-qjcbk\" (UID: \"efd6a0ef-6a5b-4308-933a-4751c3df6381\") " pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.511429 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrxtv\" (UniqueName: \"kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv\") pod \"openstack-operator-index-qjcbk\" (UID: \"efd6a0ef-6a5b-4308-933a-4751c3df6381\") " pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.570187 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.812552 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"b8bc36917b62b49ffb092cfef0e9cba6dca51aa8e8e1d51dd980862c977317be"} Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.812621 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-gnnv9" event={"ID":"7d68d329-fabf-44a2-a134-c5ccfe0ddf96","Type":"ContainerStarted","Data":"1749c5de52f853e3a7d235b68fb60129a08f1f22f9d5d86f9d5f0cd37fef388d"} Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.812784 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.836822 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-gnnv9" podStartSLOduration=6.477071313 podStartE2EDuration="15.836802266s" podCreationTimestamp="2025-10-03 13:05:50 +0000 UTC" firstStartedPulling="2025-10-03 13:05:50.871846125 +0000 UTC m=+947.081695181" lastFinishedPulling="2025-10-03 13:06:00.231577068 +0000 UTC m=+956.441426134" observedRunningTime="2025-10-03 13:06:05.832189953 +0000 UTC m=+962.042039029" watchObservedRunningTime="2025-10-03 13:06:05.836802266 +0000 UTC m=+962.046651332" Oct 03 13:06:05 crc kubenswrapper[4868]: I1003 13:06:05.974612 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:06 crc kubenswrapper[4868]: I1003 13:06:06.819282 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qjcbk" event={"ID":"efd6a0ef-6a5b-4308-933a-4751c3df6381","Type":"ContainerStarted","Data":"b1eb20a8003ff2c19df33dabcdef1ae7fe2f975dabe2354fccf156272cbcdf5b"} Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.017634 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.626873 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-x22cd"] Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.628303 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.632888 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x22cd"] Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.720202 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lntk\" (UniqueName: \"kubernetes.io/projected/3e46757a-79d0-458e-88d7-c91c6661321a-kube-api-access-6lntk\") pod \"openstack-operator-index-x22cd\" (UID: \"3e46757a-79d0-458e-88d7-c91c6661321a\") " pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.821294 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lntk\" (UniqueName: \"kubernetes.io/projected/3e46757a-79d0-458e-88d7-c91c6661321a-kube-api-access-6lntk\") pod \"openstack-operator-index-x22cd\" (UID: \"3e46757a-79d0-458e-88d7-c91c6661321a\") " pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.839725 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lntk\" (UniqueName: \"kubernetes.io/projected/3e46757a-79d0-458e-88d7-c91c6661321a-kube-api-access-6lntk\") pod \"openstack-operator-index-x22cd\" (UID: \"3e46757a-79d0-458e-88d7-c91c6661321a\") " pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:08 crc kubenswrapper[4868]: I1003 13:06:08.956486 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:09 crc kubenswrapper[4868]: I1003 13:06:09.345781 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-x22cd"] Oct 03 13:06:09 crc kubenswrapper[4868]: I1003 13:06:09.836976 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x22cd" event={"ID":"3e46757a-79d0-458e-88d7-c91c6661321a","Type":"ContainerStarted","Data":"d2d4b027bb3464545c0233118b6499031a2c1fedce19c84f0ca701e60427be95"} Oct 03 13:06:10 crc kubenswrapper[4868]: I1003 13:06:10.691180 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:06:10 crc kubenswrapper[4868]: I1003 13:06:10.705269 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-64bf5d555-vhc7k" Oct 03 13:06:10 crc kubenswrapper[4868]: I1003 13:06:10.728010 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:06:16 crc kubenswrapper[4868]: I1003 13:06:16.878736 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-x22cd" event={"ID":"3e46757a-79d0-458e-88d7-c91c6661321a","Type":"ContainerStarted","Data":"38505b7348e907979c001515b8206f75cb56991bae336e1fabf536972748b4ec"} Oct 03 13:06:16 crc kubenswrapper[4868]: I1003 13:06:16.880245 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qjcbk" event={"ID":"efd6a0ef-6a5b-4308-933a-4751c3df6381","Type":"ContainerStarted","Data":"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48"} Oct 03 13:06:16 crc kubenswrapper[4868]: I1003 13:06:16.880334 4868 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack-operators/openstack-operator-index-qjcbk" podUID="efd6a0ef-6a5b-4308-933a-4751c3df6381" containerName="registry-server" containerID="cri-o://1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48" gracePeriod=2 Oct 03 13:06:16 crc kubenswrapper[4868]: I1003 13:06:16.902420 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-x22cd" podStartSLOduration=2.4259500960000002 podStartE2EDuration="8.902398011s" podCreationTimestamp="2025-10-03 13:06:08 +0000 UTC" firstStartedPulling="2025-10-03 13:06:09.397575096 +0000 UTC m=+965.607424162" lastFinishedPulling="2025-10-03 13:06:15.874023011 +0000 UTC m=+972.083872077" observedRunningTime="2025-10-03 13:06:16.895860956 +0000 UTC m=+973.105710022" watchObservedRunningTime="2025-10-03 13:06:16.902398011 +0000 UTC m=+973.112247077" Oct 03 13:06:16 crc kubenswrapper[4868]: I1003 13:06:16.917007 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-qjcbk" podStartSLOduration=2.02528036 podStartE2EDuration="11.916987463s" podCreationTimestamp="2025-10-03 13:06:05 +0000 UTC" firstStartedPulling="2025-10-03 13:06:05.979975625 +0000 UTC m=+962.189824691" lastFinishedPulling="2025-10-03 13:06:15.871682728 +0000 UTC m=+972.081531794" observedRunningTime="2025-10-03 13:06:16.915333269 +0000 UTC m=+973.125182335" watchObservedRunningTime="2025-10-03 13:06:16.916987463 +0000 UTC m=+973.126836519" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.250300 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.335040 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qrxtv\" (UniqueName: \"kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv\") pod \"efd6a0ef-6a5b-4308-933a-4751c3df6381\" (UID: \"efd6a0ef-6a5b-4308-933a-4751c3df6381\") " Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.340559 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv" (OuterVolumeSpecName: "kube-api-access-qrxtv") pod "efd6a0ef-6a5b-4308-933a-4751c3df6381" (UID: "efd6a0ef-6a5b-4308-933a-4751c3df6381"). InnerVolumeSpecName "kube-api-access-qrxtv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.436514 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qrxtv\" (UniqueName: \"kubernetes.io/projected/efd6a0ef-6a5b-4308-933a-4751c3df6381-kube-api-access-qrxtv\") on node \"crc\" DevicePath \"\"" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.889351 4868 generic.go:334] "Generic (PLEG): container finished" podID="efd6a0ef-6a5b-4308-933a-4751c3df6381" containerID="1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48" exitCode=0 Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.889470 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-qjcbk" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.889696 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qjcbk" event={"ID":"efd6a0ef-6a5b-4308-933a-4751c3df6381","Type":"ContainerDied","Data":"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48"} Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.889754 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-qjcbk" event={"ID":"efd6a0ef-6a5b-4308-933a-4751c3df6381","Type":"ContainerDied","Data":"b1eb20a8003ff2c19df33dabcdef1ae7fe2f975dabe2354fccf156272cbcdf5b"} Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.889775 4868 scope.go:117] "RemoveContainer" containerID="1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.914686 4868 scope.go:117] "RemoveContainer" containerID="1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48" Oct 03 13:06:17 crc kubenswrapper[4868]: E1003 13:06:17.915404 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48\": container with ID starting with 1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48 not found: ID does not exist" containerID="1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.915453 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48"} err="failed to get container status \"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48\": rpc error: code = NotFound desc = could not find container \"1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48\": container with ID starting with 1d4e1edcf3337e392292bd47968dd46921b43a8177039eeb23653f45f3190a48 not found: ID does not exist" Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.921786 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:17 crc kubenswrapper[4868]: I1003 13:06:17.925138 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-qjcbk"] Oct 03 13:06:18 crc kubenswrapper[4868]: I1003 13:06:18.556660 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efd6a0ef-6a5b-4308-933a-4751c3df6381" path="/var/lib/kubelet/pods/efd6a0ef-6a5b-4308-933a-4751c3df6381/volumes" Oct 03 13:06:18 crc kubenswrapper[4868]: I1003 13:06:18.957760 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:18 crc kubenswrapper[4868]: I1003 13:06:18.958493 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:18 crc kubenswrapper[4868]: I1003 13:06:18.983578 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:20 crc kubenswrapper[4868]: I1003 13:06:20.692684 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-gnnv9" Oct 03 13:06:28 crc kubenswrapper[4868]: I1003 
13:06:28.996809 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-x22cd" Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.145713 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.146128 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.146177 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.146752 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.146806 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a" gracePeriod=600 Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.983848 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a" exitCode=0 Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.983891 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a"} Oct 03 13:06:32 crc kubenswrapper[4868]: I1003 13:06:32.984265 4868 scope.go:117] "RemoveContainer" containerID="c81c70808e3db2055985906ff5605af908c411acad06f21116ad2fb658245d13" Oct 03 13:06:33 crc kubenswrapper[4868]: I1003 13:06:33.995824 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c"} Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.959858 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7"] Oct 03 13:06:38 crc kubenswrapper[4868]: E1003 13:06:38.960541 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="efd6a0ef-6a5b-4308-933a-4751c3df6381" containerName="registry-server" Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.960554 4868 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="efd6a0ef-6a5b-4308-933a-4751c3df6381" containerName="registry-server" Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.960661 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="efd6a0ef-6a5b-4308-933a-4751c3df6381" containerName="registry-server" Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.961472 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.963967 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-2t2q6" Oct 03 13:06:38 crc kubenswrapper[4868]: I1003 13:06:38.976432 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7"] Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.028248 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.028330 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8b262\" (UniqueName: \"kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.028367 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.129979 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.130089 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8b262\" (UniqueName: \"kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.130129 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util\") pod 
\"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.130694 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.130694 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.150003 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8b262\" (UniqueName: \"kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262\") pod \"6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.277359 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:39 crc kubenswrapper[4868]: I1003 13:06:39.702026 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7"] Oct 03 13:06:40 crc kubenswrapper[4868]: I1003 13:06:40.028315 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerStarted","Data":"44f0bd595a9c1d02dc73803411114509d6266385c01000f5ea519d06ed480a86"} Oct 03 13:06:40 crc kubenswrapper[4868]: I1003 13:06:40.028768 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerStarted","Data":"aacfe64af75fce56dc247eca40a69c69d1401850a16dfa560abb7a9d1dd759fc"} Oct 03 13:06:41 crc kubenswrapper[4868]: I1003 13:06:41.035124 4868 generic.go:334] "Generic (PLEG): container finished" podID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerID="44f0bd595a9c1d02dc73803411114509d6266385c01000f5ea519d06ed480a86" exitCode=0 Oct 03 13:06:41 crc kubenswrapper[4868]: I1003 13:06:41.035198 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerDied","Data":"44f0bd595a9c1d02dc73803411114509d6266385c01000f5ea519d06ed480a86"} Oct 03 13:06:43 crc kubenswrapper[4868]: I1003 13:06:43.059075 4868 generic.go:334] "Generic (PLEG): container finished" podID="f93542ca-2418-4cd7-ade7-78a83fab6088" 
containerID="08b9121ffabfec3acb88f57cc3048765637d7faa6c881769fdc52c8c318aabc2" exitCode=0 Oct 03 13:06:43 crc kubenswrapper[4868]: I1003 13:06:43.059627 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerDied","Data":"08b9121ffabfec3acb88f57cc3048765637d7faa6c881769fdc52c8c318aabc2"} Oct 03 13:06:44 crc kubenswrapper[4868]: I1003 13:06:44.067833 4868 generic.go:334] "Generic (PLEG): container finished" podID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerID="47af0464fcdbf30bddfe55930550346929b1d00e77ba0365e07ed586af8a8de4" exitCode=0 Oct 03 13:06:44 crc kubenswrapper[4868]: I1003 13:06:44.067910 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerDied","Data":"47af0464fcdbf30bddfe55930550346929b1d00e77ba0365e07ed586af8a8de4"} Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.290291 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.407937 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util\") pod \"f93542ca-2418-4cd7-ade7-78a83fab6088\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.408411 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8b262\" (UniqueName: \"kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262\") pod \"f93542ca-2418-4cd7-ade7-78a83fab6088\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.408454 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle\") pod \"f93542ca-2418-4cd7-ade7-78a83fab6088\" (UID: \"f93542ca-2418-4cd7-ade7-78a83fab6088\") " Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.409416 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle" (OuterVolumeSpecName: "bundle") pod "f93542ca-2418-4cd7-ade7-78a83fab6088" (UID: "f93542ca-2418-4cd7-ade7-78a83fab6088"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.414508 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262" (OuterVolumeSpecName: "kube-api-access-8b262") pod "f93542ca-2418-4cd7-ade7-78a83fab6088" (UID: "f93542ca-2418-4cd7-ade7-78a83fab6088"). InnerVolumeSpecName "kube-api-access-8b262". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.510084 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8b262\" (UniqueName: \"kubernetes.io/projected/f93542ca-2418-4cd7-ade7-78a83fab6088-kube-api-access-8b262\") on node \"crc\" DevicePath \"\"" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.510127 4868 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.791385 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util" (OuterVolumeSpecName: "util") pod "f93542ca-2418-4cd7-ade7-78a83fab6088" (UID: "f93542ca-2418-4cd7-ade7-78a83fab6088"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:06:45 crc kubenswrapper[4868]: I1003 13:06:45.813398 4868 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/f93542ca-2418-4cd7-ade7-78a83fab6088-util\") on node \"crc\" DevicePath \"\"" Oct 03 13:06:46 crc kubenswrapper[4868]: I1003 13:06:46.081359 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" event={"ID":"f93542ca-2418-4cd7-ade7-78a83fab6088","Type":"ContainerDied","Data":"aacfe64af75fce56dc247eca40a69c69d1401850a16dfa560abb7a9d1dd759fc"} Oct 03 13:06:46 crc kubenswrapper[4868]: I1003 13:06:46.081992 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aacfe64af75fce56dc247eca40a69c69d1401850a16dfa560abb7a9d1dd759fc" Oct 03 13:06:46 crc kubenswrapper[4868]: I1003 13:06:46.081422 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.901232 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl"] Oct 03 13:06:49 crc kubenswrapper[4868]: E1003 13:06:49.903518 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="util" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.903781 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="util" Oct 03 13:06:49 crc kubenswrapper[4868]: E1003 13:06:49.903933 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="pull" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.904126 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="pull" Oct 03 13:06:49 crc kubenswrapper[4868]: E1003 13:06:49.904325 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="extract" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.904447 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="extract" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.904783 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f93542ca-2418-4cd7-ade7-78a83fab6088" containerName="extract" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.905932 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.908777 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-rdrkg" Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.936706 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl"] Oct 03 13:06:49 crc kubenswrapper[4868]: I1003 13:06:49.977525 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h756f\" (UniqueName: \"kubernetes.io/projected/1435bf3e-adba-43bb-97b4-2caea4a8c4c8-kube-api-access-h756f\") pod \"openstack-operator-controller-operator-764f84468b-lbhnl\" (UID: \"1435bf3e-adba-43bb-97b4-2caea4a8c4c8\") " pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:50 crc kubenswrapper[4868]: I1003 13:06:50.078374 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h756f\" (UniqueName: \"kubernetes.io/projected/1435bf3e-adba-43bb-97b4-2caea4a8c4c8-kube-api-access-h756f\") pod \"openstack-operator-controller-operator-764f84468b-lbhnl\" (UID: \"1435bf3e-adba-43bb-97b4-2caea4a8c4c8\") " pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:50 crc kubenswrapper[4868]: I1003 13:06:50.107518 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h756f\" (UniqueName: \"kubernetes.io/projected/1435bf3e-adba-43bb-97b4-2caea4a8c4c8-kube-api-access-h756f\") pod \"openstack-operator-controller-operator-764f84468b-lbhnl\" 
(UID: \"1435bf3e-adba-43bb-97b4-2caea4a8c4c8\") " pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:50 crc kubenswrapper[4868]: I1003 13:06:50.229113 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:50 crc kubenswrapper[4868]: I1003 13:06:50.686176 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl"] Oct 03 13:06:50 crc kubenswrapper[4868]: W1003 13:06:50.699210 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1435bf3e_adba_43bb_97b4_2caea4a8c4c8.slice/crio-76650e1ad3f98796687bb6926cfc8421734d828a76fbe9d4997a8efd2f3b4c30 WatchSource:0}: Error finding container 76650e1ad3f98796687bb6926cfc8421734d828a76fbe9d4997a8efd2f3b4c30: Status 404 returned error can't find the container with id 76650e1ad3f98796687bb6926cfc8421734d828a76fbe9d4997a8efd2f3b4c30 Oct 03 13:06:51 crc kubenswrapper[4868]: I1003 13:06:51.114427 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" event={"ID":"1435bf3e-adba-43bb-97b4-2caea4a8c4c8","Type":"ContainerStarted","Data":"76650e1ad3f98796687bb6926cfc8421734d828a76fbe9d4997a8efd2f3b4c30"} Oct 03 13:06:55 crc kubenswrapper[4868]: I1003 13:06:55.148280 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" event={"ID":"1435bf3e-adba-43bb-97b4-2caea4a8c4c8","Type":"ContainerStarted","Data":"480e652b575d8912429a7e8714ed47972bfa4071b434114417c3269c992a2d27"} Oct 03 13:06:58 crc kubenswrapper[4868]: I1003 13:06:58.174598 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" event={"ID":"1435bf3e-adba-43bb-97b4-2caea4a8c4c8","Type":"ContainerStarted","Data":"c890336ea87df127ae7e507b2049363eb60d85b54c5bb0e4b2e86d5f808695ea"} Oct 03 13:06:58 crc kubenswrapper[4868]: I1003 13:06:58.175129 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:06:58 crc kubenswrapper[4868]: I1003 13:06:58.212024 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" podStartSLOduration=2.893738928 podStartE2EDuration="9.21199409s" podCreationTimestamp="2025-10-03 13:06:49 +0000 UTC" firstStartedPulling="2025-10-03 13:06:50.708381465 +0000 UTC m=+1006.918230531" lastFinishedPulling="2025-10-03 13:06:57.026636627 +0000 UTC m=+1013.236485693" observedRunningTime="2025-10-03 13:06:58.207278244 +0000 UTC m=+1014.417127330" watchObservedRunningTime="2025-10-03 13:06:58.21199409 +0000 UTC m=+1014.421843156" Oct 03 13:06:59 crc kubenswrapper[4868]: I1003 13:06:59.182880 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-764f84468b-lbhnl" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.184094 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.185575 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.187318 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-jl76r" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.195649 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.196839 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.199691 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-6n7mz" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.200780 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.213238 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.214228 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.217234 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-j2tmh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.236283 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.242891 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.251504 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrdw2\" (UniqueName: \"kubernetes.io/projected/9efa809d-5837-4900-a456-84edfb2ba501-kube-api-access-zrdw2\") pod \"cinder-operator-controller-manager-79d68d6c85-6pz2t\" (UID: \"9efa809d-5837-4900-a456-84edfb2ba501\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.251590 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ktk5w\" (UniqueName: \"kubernetes.io/projected/3d34922b-c3d5-4795-be9c-a39e2542f42d-kube-api-access-ktk5w\") pod \"designate-operator-controller-manager-75dfd9b554-nmp74\" (UID: \"3d34922b-c3d5-4795-be9c-a39e2542f42d\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.251620 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jxpd6\" (UniqueName: \"kubernetes.io/projected/5c06b85f-d6da-4e5f-a817-f01a18b0217c-kube-api-access-jxpd6\") pod \"barbican-operator-controller-manager-6c675fb79f-82mnx\" (UID: \"5c06b85f-d6da-4e5f-a817-f01a18b0217c\") " 
pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.262174 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-txz24"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.263506 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.265985 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-lctlw" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.269764 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-r8szh"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.270914 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.273280 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-gmgzb" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.288659 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-r8szh"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.298170 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-txz24"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.322026 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.323186 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.327448 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-lch4s" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.352446 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353419 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jxpd6\" (UniqueName: \"kubernetes.io/projected/5c06b85f-d6da-4e5f-a817-f01a18b0217c-kube-api-access-jxpd6\") pod \"barbican-operator-controller-manager-6c675fb79f-82mnx\" (UID: \"5c06b85f-d6da-4e5f-a817-f01a18b0217c\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353455 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ktk5w\" (UniqueName: \"kubernetes.io/projected/3d34922b-c3d5-4795-be9c-a39e2542f42d-kube-api-access-ktk5w\") pod \"designate-operator-controller-manager-75dfd9b554-nmp74\" (UID: \"3d34922b-c3d5-4795-be9c-a39e2542f42d\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353482 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8nbw9\" (UniqueName: \"kubernetes.io/projected/ed9b7dc9-9145-42db-bed4-c4cf3f22c07f-kube-api-access-8nbw9\") pod \"horizon-operator-controller-manager-6769b867d9-8dh82\" (UID: \"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353504 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhx2p\" (UniqueName: \"kubernetes.io/projected/a9341a61-ad61-4ab0-8056-fea9a2e0644e-kube-api-access-lhx2p\") pod \"heat-operator-controller-manager-599898f689-r8szh\" (UID: \"a9341a61-ad61-4ab0-8056-fea9a2e0644e\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353531 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cg22l\" (UniqueName: \"kubernetes.io/projected/ac74b0fc-4221-46e6-b88a-f9bd4a484952-kube-api-access-cg22l\") pod \"glance-operator-controller-manager-846dff85b5-txz24\" (UID: \"ac74b0fc-4221-46e6-b88a-f9bd4a484952\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.353600 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrdw2\" (UniqueName: \"kubernetes.io/projected/9efa809d-5837-4900-a456-84edfb2ba501-kube-api-access-zrdw2\") pod \"cinder-operator-controller-manager-79d68d6c85-6pz2t\" (UID: \"9efa809d-5837-4900-a456-84edfb2ba501\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.377542 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9"] Oct 03 13:07:24 crc 
kubenswrapper[4868]: I1003 13:07:24.384068 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.394687 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jxpd6\" (UniqueName: \"kubernetes.io/projected/5c06b85f-d6da-4e5f-a817-f01a18b0217c-kube-api-access-jxpd6\") pod \"barbican-operator-controller-manager-6c675fb79f-82mnx\" (UID: \"5c06b85f-d6da-4e5f-a817-f01a18b0217c\") " pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.394950 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrdw2\" (UniqueName: \"kubernetes.io/projected/9efa809d-5837-4900-a456-84edfb2ba501-kube-api-access-zrdw2\") pod \"cinder-operator-controller-manager-79d68d6c85-6pz2t\" (UID: \"9efa809d-5837-4900-a456-84edfb2ba501\") " pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.395256 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-96x5g" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.398082 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ktk5w\" (UniqueName: \"kubernetes.io/projected/3d34922b-c3d5-4795-be9c-a39e2542f42d-kube-api-access-ktk5w\") pod \"designate-operator-controller-manager-75dfd9b554-nmp74\" (UID: \"3d34922b-c3d5-4795-be9c-a39e2542f42d\") " pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.401585 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.402782 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.409582 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.409941 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.410074 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-5b4hn" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.422635 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.424012 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.427252 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-j2trq" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454359 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454422 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-48shq\" (UniqueName: \"kubernetes.io/projected/96f01adb-73f9-45c4-bf04-677ffa2942e2-kube-api-access-48shq\") pod \"ironic-operator-controller-manager-84bc9db6cc-lsdv9\" (UID: \"96f01adb-73f9-45c4-bf04-677ffa2942e2\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454456 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kfdr4\" (UniqueName: \"kubernetes.io/projected/6a879dee-0e96-4658-b0b2-ddfa08037b88-kube-api-access-kfdr4\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454516 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8nbw9\" (UniqueName: \"kubernetes.io/projected/ed9b7dc9-9145-42db-bed4-c4cf3f22c07f-kube-api-access-8nbw9\") pod \"horizon-operator-controller-manager-6769b867d9-8dh82\" (UID: \"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454545 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhx2p\" (UniqueName: \"kubernetes.io/projected/a9341a61-ad61-4ab0-8056-fea9a2e0644e-kube-api-access-lhx2p\") pod \"heat-operator-controller-manager-599898f689-r8szh\" (UID: \"a9341a61-ad61-4ab0-8056-fea9a2e0644e\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454577 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cg22l\" (UniqueName: \"kubernetes.io/projected/ac74b0fc-4221-46e6-b88a-f9bd4a484952-kube-api-access-cg22l\") pod \"glance-operator-controller-manager-846dff85b5-txz24\" (UID: \"ac74b0fc-4221-46e6-b88a-f9bd4a484952\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.454637 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ggm4\" (UniqueName: \"kubernetes.io/projected/f125746c-8bbb-499e-95e0-2dd8071d914e-kube-api-access-7ggm4\") pod \"keystone-operator-controller-manager-7f55849f88-mql26\" (UID: \"f125746c-8bbb-499e-95e0-2dd8071d914e\") " 
pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.455311 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.467389 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.485748 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhx2p\" (UniqueName: \"kubernetes.io/projected/a9341a61-ad61-4ab0-8056-fea9a2e0644e-kube-api-access-lhx2p\") pod \"heat-operator-controller-manager-599898f689-r8szh\" (UID: \"a9341a61-ad61-4ab0-8056-fea9a2e0644e\") " pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.494250 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.495611 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.496962 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8nbw9\" (UniqueName: \"kubernetes.io/projected/ed9b7dc9-9145-42db-bed4-c4cf3f22c07f-kube-api-access-8nbw9\") pod \"horizon-operator-controller-manager-6769b867d9-8dh82\" (UID: \"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f\") " pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.503699 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.504044 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cg22l\" (UniqueName: \"kubernetes.io/projected/ac74b0fc-4221-46e6-b88a-f9bd4a484952-kube-api-access-cg22l\") pod \"glance-operator-controller-manager-846dff85b5-txz24\" (UID: \"ac74b0fc-4221-46e6-b88a-f9bd4a484952\") " pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.512646 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-5jr88" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.513552 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.522182 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.531584 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.542145 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.544345 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.547300 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-q47ps" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562212 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btkpv\" (UniqueName: \"kubernetes.io/projected/10842e9e-e075-4399-88c9-96df14bf7959-kube-api-access-btkpv\") pod \"manila-operator-controller-manager-6fd6854b49-fvf5f\" (UID: \"10842e9e-e075-4399-88c9-96df14bf7959\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562338 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pm444\" (UniqueName: \"kubernetes.io/projected/1121fe0b-eb0d-43f2-b503-85a3a3601c7e-kube-api-access-pm444\") pod \"mariadb-operator-controller-manager-5c468bf4d4-lsvj8\" (UID: \"1121fe0b-eb0d-43f2-b503-85a3a3601c7e\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562463 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ggm4\" (UniqueName: \"kubernetes.io/projected/f125746c-8bbb-499e-95e0-2dd8071d914e-kube-api-access-7ggm4\") pod \"keystone-operator-controller-manager-7f55849f88-mql26\" (UID: \"f125746c-8bbb-499e-95e0-2dd8071d914e\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562544 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562594 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-48shq\" (UniqueName: \"kubernetes.io/projected/96f01adb-73f9-45c4-bf04-677ffa2942e2-kube-api-access-48shq\") pod \"ironic-operator-controller-manager-84bc9db6cc-lsdv9\" (UID: \"96f01adb-73f9-45c4-bf04-677ffa2942e2\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.562626 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kfdr4\" (UniqueName: \"kubernetes.io/projected/6a879dee-0e96-4658-b0b2-ddfa08037b88-kube-api-access-kfdr4\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: E1003 13:07:24.566575 4868 
secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Oct 03 13:07:24 crc kubenswrapper[4868]: E1003 13:07:24.566629 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert podName:6a879dee-0e96-4658-b0b2-ddfa08037b88 nodeName:}" failed. No retries permitted until 2025-10-03 13:07:25.066609689 +0000 UTC m=+1041.276458745 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert") pod "infra-operator-controller-manager-5fbf469cd7-m4cfh" (UID: "6a879dee-0e96-4658-b0b2-ddfa08037b88") : secret "infra-operator-webhook-server-cert" not found Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.567725 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.567762 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.569931 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.572899 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-zrf6r" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.588177 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.599217 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.600449 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ggm4\" (UniqueName: \"kubernetes.io/projected/f125746c-8bbb-499e-95e0-2dd8071d914e-kube-api-access-7ggm4\") pod \"keystone-operator-controller-manager-7f55849f88-mql26\" (UID: \"f125746c-8bbb-499e-95e0-2dd8071d914e\") " pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.604006 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kfdr4\" (UniqueName: \"kubernetes.io/projected/6a879dee-0e96-4658-b0b2-ddfa08037b88-kube-api-access-kfdr4\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.605583 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.606979 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.608586 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-48shq\" (UniqueName: \"kubernetes.io/projected/96f01adb-73f9-45c4-bf04-677ffa2942e2-kube-api-access-48shq\") pod \"ironic-operator-controller-manager-84bc9db6cc-lsdv9\" (UID: \"96f01adb-73f9-45c4-bf04-677ffa2942e2\") " pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.617109 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.617390 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.630149 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.634380 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.634512 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.635796 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-tkpmf" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.635918 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-sqdgg" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.652004 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.655842 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.657364 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.659633 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-wgc8d" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.663432 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btkpv\" (UniqueName: \"kubernetes.io/projected/10842e9e-e075-4399-88c9-96df14bf7959-kube-api-access-btkpv\") pod \"manila-operator-controller-manager-6fd6854b49-fvf5f\" (UID: \"10842e9e-e075-4399-88c9-96df14bf7959\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.663495 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4rs4\" (UniqueName: \"kubernetes.io/projected/4a104edd-a22b-4767-8124-0e1a0e87a999-kube-api-access-v4rs4\") pod \"nova-operator-controller-manager-555c7456bd-jkjfz\" (UID: \"4a104edd-a22b-4767-8124-0e1a0e87a999\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.663520 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qrpwq\" (UniqueName: \"kubernetes.io/projected/756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22-kube-api-access-qrpwq\") pod \"octavia-operator-controller-manager-59d6cfdf45-8j6dq\" (UID: \"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.663543 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pm444\" (UniqueName: \"kubernetes.io/projected/1121fe0b-eb0d-43f2-b503-85a3a3601c7e-kube-api-access-pm444\") pod \"mariadb-operator-controller-manager-5c468bf4d4-lsvj8\" (UID: \"1121fe0b-eb0d-43f2-b503-85a3a3601c7e\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.663625 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wl29c\" (UniqueName: \"kubernetes.io/projected/1f467387-a3f8-4b5b-af79-14eaf2bf799a-kube-api-access-wl29c\") pod \"neutron-operator-controller-manager-6574bf987d-t7vpl\" (UID: \"1f467387-a3f8-4b5b-af79-14eaf2bf799a\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.671522 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.674015 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.682108 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg"] Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.684923 4868 util.go:30] "No sandbox for pod can be found. 
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.693652 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.693907 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-tfpz4"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.694825 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-xnznn"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.700672 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.706112 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pm444\" (UniqueName: \"kubernetes.io/projected/1121fe0b-eb0d-43f2-b503-85a3a3601c7e-kube-api-access-pm444\") pod \"mariadb-operator-controller-manager-5c468bf4d4-lsvj8\" (UID: \"1121fe0b-eb0d-43f2-b503-85a3a3601c7e\") " pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.707222 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btkpv\" (UniqueName: \"kubernetes.io/projected/10842e9e-e075-4399-88c9-96df14bf7959-kube-api-access-btkpv\") pod \"manila-operator-controller-manager-6fd6854b49-fvf5f\" (UID: \"10842e9e-e075-4399-88c9-96df14bf7959\") " pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.712526 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.722035 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.724426 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.727731 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.729292 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-tctsr"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.769800 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.790752 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.798127 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4rs4\" (UniqueName: \"kubernetes.io/projected/4a104edd-a22b-4767-8124-0e1a0e87a999-kube-api-access-v4rs4\") pod \"nova-operator-controller-manager-555c7456bd-jkjfz\" (UID: \"4a104edd-a22b-4767-8124-0e1a0e87a999\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.798187 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qrpwq\" (UniqueName: \"kubernetes.io/projected/756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22-kube-api-access-qrpwq\") pod \"octavia-operator-controller-manager-59d6cfdf45-8j6dq\" (UID: \"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.798496 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wl29c\" (UniqueName: \"kubernetes.io/projected/1f467387-a3f8-4b5b-af79-14eaf2bf799a-kube-api-access-wl29c\") pod \"neutron-operator-controller-manager-6574bf987d-t7vpl\" (UID: \"1f467387-a3f8-4b5b-af79-14eaf2bf799a\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.845721 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.855043 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wl29c\" (UniqueName: \"kubernetes.io/projected/1f467387-a3f8-4b5b-af79-14eaf2bf799a-kube-api-access-wl29c\") pod \"neutron-operator-controller-manager-6574bf987d-t7vpl\" (UID: \"1f467387-a3f8-4b5b-af79-14eaf2bf799a\") " pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.855944 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.860372 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qrpwq\" (UniqueName: \"kubernetes.io/projected/756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22-kube-api-access-qrpwq\") pod \"octavia-operator-controller-manager-59d6cfdf45-8j6dq\" (UID: \"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22\") " pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.860949 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4rs4\" (UniqueName: \"kubernetes.io/projected/4a104edd-a22b-4767-8124-0e1a0e87a999-kube-api-access-v4rs4\") pod \"nova-operator-controller-manager-555c7456bd-jkjfz\" (UID: \"4a104edd-a22b-4767-8124-0e1a0e87a999\") " pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.895385 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.896404 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.905492 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8m2gf\" (UniqueName: \"kubernetes.io/projected/72d193fe-4b1e-4c77-bda6-a44e1a8318b6-kube-api-access-8m2gf\") pod \"placement-operator-controller-manager-7d8bb7f44c-49swg\" (UID: \"72d193fe-4b1e-4c77-bda6-a44e1a8318b6\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.905536 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxq52\" (UniqueName: \"kubernetes.io/projected/546f0324-867d-4a32-a8c2-5e72d95aff3b-kube-api-access-bxq52\") pod \"ovn-operator-controller-manager-688db7b6c7-wbsqt\" (UID: \"546f0324-867d-4a32-a8c2-5e72d95aff3b\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.905561 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-djn6k\" (UniqueName: \"kubernetes.io/projected/3f5c7d09-45c2-42bf-b441-70fc16504141-kube-api-access-djn6k\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.905646 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hfprj\" (UniqueName: \"kubernetes.io/projected/5dbc41e1-0980-4e30-9531-233266c50aca-kube-api-access-hfprj\") pod \"swift-operator-controller-manager-6859f9b676-j7pqs\" (UID: \"5dbc41e1-0980-4e30-9531-233266c50aca\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.905696 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.934444 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-qfvt6"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.935489 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.945297 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.955881 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.959105 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"]
Oct 03 13:07:24 crc kubenswrapper[4868]: I1003 13:07:24.971428 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.007599 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.010034 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxq52\" (UniqueName: \"kubernetes.io/projected/546f0324-867d-4a32-a8c2-5e72d95aff3b-kube-api-access-bxq52\") pod \"ovn-operator-controller-manager-688db7b6c7-wbsqt\" (UID: \"546f0324-867d-4a32-a8c2-5e72d95aff3b\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016723 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-djn6k\" (UniqueName: \"kubernetes.io/projected/3f5c7d09-45c2-42bf-b441-70fc16504141-kube-api-access-djn6k\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016788 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hfprj\" (UniqueName: \"kubernetes.io/projected/5dbc41e1-0980-4e30-9531-233266c50aca-kube-api-access-hfprj\") pod \"swift-operator-controller-manager-6859f9b676-j7pqs\" (UID: \"5dbc41e1-0980-4e30-9531-233266c50aca\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016839 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016864 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6gv5j\" (UniqueName: \"kubernetes.io/projected/b8907927-2a99-491c-9305-08f86cb8525d-kube-api-access-6gv5j\") pod \"telemetry-operator-controller-manager-5db5cf686f-zlj2b\" (UID: \"b8907927-2a99-491c-9305-08f86cb8525d\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.016893 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8m2gf\" (UniqueName: \"kubernetes.io/projected/72d193fe-4b1e-4c77-bda6-a44e1a8318b6-kube-api-access-8m2gf\") pod \"placement-operator-controller-manager-7d8bb7f44c-49swg\" (UID: \"72d193fe-4b1e-4c77-bda6-a44e1a8318b6\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg"
\"placement-operator-controller-manager-7d8bb7f44c-49swg\" (UID: \"72d193fe-4b1e-4c77-bda6-a44e1a8318b6\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.017510 4868 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.017548 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert podName:3f5c7d09-45c2-42bf-b441-70fc16504141 nodeName:}" failed. No retries permitted until 2025-10-03 13:07:25.517535791 +0000 UTC m=+1041.727384857 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert") pod "openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" (UID: "3f5c7d09-45c2-42bf-b441-70fc16504141") : secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.017894 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-pxl8t" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.045973 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8m2gf\" (UniqueName: \"kubernetes.io/projected/72d193fe-4b1e-4c77-bda6-a44e1a8318b6-kube-api-access-8m2gf\") pod \"placement-operator-controller-manager-7d8bb7f44c-49swg\" (UID: \"72d193fe-4b1e-4c77-bda6-a44e1a8318b6\") " pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.052457 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-djn6k\" (UniqueName: \"kubernetes.io/projected/3f5c7d09-45c2-42bf-b441-70fc16504141-kube-api-access-djn6k\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.058884 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hfprj\" (UniqueName: \"kubernetes.io/projected/5dbc41e1-0980-4e30-9531-233266c50aca-kube-api-access-hfprj\") pod \"swift-operator-controller-manager-6859f9b676-j7pqs\" (UID: \"5dbc41e1-0980-4e30-9531-233266c50aca\") " pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.063407 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxq52\" (UniqueName: \"kubernetes.io/projected/546f0324-867d-4a32-a8c2-5e72d95aff3b-kube-api-access-bxq52\") pod \"ovn-operator-controller-manager-688db7b6c7-wbsqt\" (UID: \"546f0324-867d-4a32-a8c2-5e72d95aff3b\") " pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.063751 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.085967 4868 util.go:30] "No sandbox for pod can be found. 
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.119280 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.120229 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.120278 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcqfh\" (UniqueName: \"kubernetes.io/projected/52c85570-4e33-49d4-b6df-a65727f1df56-kube-api-access-zcqfh\") pod \"test-operator-controller-manager-5cd5cb47d7-zftwb\" (UID: \"52c85570-4e33-49d4-b6df-a65727f1df56\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.120339 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6gv5j\" (UniqueName: \"kubernetes.io/projected/b8907927-2a99-491c-9305-08f86cb8525d-kube-api-access-6gv5j\") pod \"telemetry-operator-controller-manager-5db5cf686f-zlj2b\" (UID: \"b8907927-2a99-491c-9305-08f86cb8525d\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"
Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.120741 4868 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.120780 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert podName:6a879dee-0e96-4658-b0b2-ddfa08037b88 nodeName:}" failed. No retries permitted until 2025-10-03 13:07:26.120766412 +0000 UTC m=+1042.330615478 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert") pod "infra-operator-controller-manager-5fbf469cd7-m4cfh" (UID: "6a879dee-0e96-4658-b0b2-ddfa08037b88") : secret "infra-operator-webhook-server-cert" not found
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.144930 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6gv5j\" (UniqueName: \"kubernetes.io/projected/b8907927-2a99-491c-9305-08f86cb8525d-kube-api-access-6gv5j\") pod \"telemetry-operator-controller-manager-5db5cf686f-zlj2b\" (UID: \"b8907927-2a99-491c-9305-08f86cb8525d\") " pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.206371 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.207556 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.213329 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-kr8pj"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.221552 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcqfh\" (UniqueName: \"kubernetes.io/projected/52c85570-4e33-49d4-b6df-a65727f1df56-kube-api-access-zcqfh\") pod \"test-operator-controller-manager-5cd5cb47d7-zftwb\" (UID: \"52c85570-4e33-49d4-b6df-a65727f1df56\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.223142 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.268642 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcqfh\" (UniqueName: \"kubernetes.io/projected/52c85570-4e33-49d4-b6df-a65727f1df56-kube-api-access-zcqfh\") pod \"test-operator-controller-manager-5cd5cb47d7-zftwb\" (UID: \"52c85570-4e33-49d4-b6df-a65727f1df56\") " pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.277191 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.280668 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.298597 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.302762 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.305422 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-qchfl"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.306510 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.322440 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xlgr\" (UniqueName: \"kubernetes.io/projected/70e868cf-a7e6-4942-b88a-71cbd6a992af-kube-api-access-4xlgr\") pod \"watcher-operator-controller-manager-fcd7d9895-zhdvk\" (UID: \"70e868cf-a7e6-4942-b88a-71cbd6a992af\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.325937 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.366855 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.367724 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.372658 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-s6k2r"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.377241 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.387773 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.389474 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.397841 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.424883 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xlgr\" (UniqueName: \"kubernetes.io/projected/70e868cf-a7e6-4942-b88a-71cbd6a992af-kube-api-access-4xlgr\") pod \"watcher-operator-controller-manager-fcd7d9895-zhdvk\" (UID: \"70e868cf-a7e6-4942-b88a-71cbd6a992af\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.425006 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zl2c\" (UniqueName: \"kubernetes.io/projected/89d66ba3-fda2-467a-a2f2-402a8661155b-kube-api-access-9zl2c\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.425042 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89d66ba3-fda2-467a-a2f2-402a8661155b-cert\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.447601 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xlgr\" (UniqueName: \"kubernetes.io/projected/70e868cf-a7e6-4942-b88a-71cbd6a992af-kube-api-access-4xlgr\") pod \"watcher-operator-controller-manager-fcd7d9895-zhdvk\" (UID: \"70e868cf-a7e6-4942-b88a-71cbd6a992af\") " pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" Oct 03 13:07:25 crc kubenswrapper[4868]: W1003 13:07:25.499067 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3d34922b_c3d5_4795_be9c_a39e2542f42d.slice/crio-cbd6d2d53f926d420f1b81272156ddd7fa003c0f3b58b2b414fc1a6e619b5531 WatchSource:0}: Error finding container cbd6d2d53f926d420f1b81272156ddd7fa003c0f3b58b2b414fc1a6e619b5531: Status 404 returned error can't find the container with id cbd6d2d53f926d420f1b81272156ddd7fa003c0f3b58b2b414fc1a6e619b5531 Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.534492 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89d66ba3-fda2-467a-a2f2-402a8661155b-cert\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.534762 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dvf8v\" (UniqueName: \"kubernetes.io/projected/97a59cbd-cfe1-49ce-9774-fc9bc76a52b0-kube-api-access-dvf8v\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-69bv7\" (UID: \"97a59cbd-cfe1-49ce-9774-fc9bc76a52b0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.534862 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.534900 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zl2c\" (UniqueName: \"kubernetes.io/projected/89d66ba3-fda2-467a-a2f2-402a8661155b-kube-api-access-9zl2c\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.535591 4868 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Oct 03 13:07:25 crc kubenswrapper[4868]: E1003 13:07:25.535669 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert podName:3f5c7d09-45c2-42bf-b441-70fc16504141 nodeName:}" failed. No retries permitted until 2025-10-03 13:07:26.535645667 +0000 UTC m=+1042.745494813 (durationBeforeRetry 1s). 
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.541404 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/89d66ba3-fda2-467a-a2f2-402a8661155b-cert\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.570929 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.571329 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zl2c\" (UniqueName: \"kubernetes.io/projected/89d66ba3-fda2-467a-a2f2-402a8661155b-kube-api-access-9zl2c\") pod \"openstack-operator-controller-manager-5c4446bf96-6xvjc\" (UID: \"89d66ba3-fda2-467a-a2f2-402a8661155b\") " pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.593159 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-846dff85b5-txz24"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.596988 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.636942 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dvf8v\" (UniqueName: \"kubernetes.io/projected/97a59cbd-cfe1-49ce-9774-fc9bc76a52b0-kube-api-access-dvf8v\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-69bv7\" (UID: \"97a59cbd-cfe1-49ce-9774-fc9bc76a52b0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.655955 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.656648 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dvf8v\" (UniqueName: \"kubernetes.io/projected/97a59cbd-cfe1-49ce-9774-fc9bc76a52b0-kube-api-access-dvf8v\") pod \"rabbitmq-cluster-operator-manager-5f97d8c699-69bv7\" (UID: \"97a59cbd-cfe1-49ce-9774-fc9bc76a52b0\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.796039 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9"]
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.840870 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-599898f689-r8szh"]
Oct 03 13:07:25 crc kubenswrapper[4868]: W1003 13:07:25.899270 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9341a61_ad61_4ab0_8056_fea9a2e0644e.slice/crio-a1305cc5f109fb553df04ebdd8938fd4975fe3d6ce67f37f4de252cf2a8d72b6 WatchSource:0}: Error finding container a1305cc5f109fb553df04ebdd8938fd4975fe3d6ce67f37f4de252cf2a8d72b6: Status 404 returned error can't find the container with id a1305cc5f109fb553df04ebdd8938fd4975fe3d6ce67f37f4de252cf2a8d72b6
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.908770 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"
Oct 03 13:07:25 crc kubenswrapper[4868]: I1003 13:07:25.948995 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.145572 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.151749 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/6a879dee-0e96-4658-b0b2-ddfa08037b88-cert\") pod \"infra-operator-controller-manager-5fbf469cd7-m4cfh\" (UID: \"6a879dee-0e96-4658-b0b2-ddfa08037b88\") " pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.243805 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.272324 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26"]
Oct 03 13:07:26 crc kubenswrapper[4868]: W1003 13:07:26.275779 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf125746c_8bbb_499e_95e0_2dd8071d914e.slice/crio-0a798cee11be6aeb8a813d7b8dad0545b4bb3300b7595cc441a4f8caabddd586 WatchSource:0}: Error finding container 0a798cee11be6aeb8a813d7b8dad0545b4bb3300b7595cc441a4f8caabddd586: Status 404 returned error can't find the container with id 0a798cee11be6aeb8a813d7b8dad0545b4bb3300b7595cc441a4f8caabddd586
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.275986 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.359586 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.365423 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.371807 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.372213 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" event={"ID":"1121fe0b-eb0d-43f2-b503-85a3a3601c7e","Type":"ContainerStarted","Data":"45fecd0f08df5cac0e41e3c32b56c4bb18f75869a735671d1c5b3575f66578e3"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.377852 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" event={"ID":"ac74b0fc-4221-46e6-b88a-f9bd4a484952","Type":"ContainerStarted","Data":"6e3c67b632229f4c792e282a140dba10ba193173e3d3fc597902073f5bd0a9ef"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.379340 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" event={"ID":"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f","Type":"ContainerStarted","Data":"ed8252acad3b299d0163f56cce2dbc83c0afb735fb613bbd3381e3cae7a57fad"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.381065 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" event={"ID":"a9341a61-ad61-4ab0-8056-fea9a2e0644e","Type":"ContainerStarted","Data":"a1305cc5f109fb553df04ebdd8938fd4975fe3d6ce67f37f4de252cf2a8d72b6"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.382507 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" event={"ID":"9efa809d-5837-4900-a456-84edfb2ba501","Type":"ContainerStarted","Data":"6fb2783645555db04bb751e80eeea440531fd0be5fd7e25989e307385bc3d70b"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.383535 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" event={"ID":"f125746c-8bbb-499e-95e0-2dd8071d914e","Type":"ContainerStarted","Data":"0a798cee11be6aeb8a813d7b8dad0545b4bb3300b7595cc441a4f8caabddd586"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.390289 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" event={"ID":"96f01adb-73f9-45c4-bf04-677ffa2942e2","Type":"ContainerStarted","Data":"60f8a496e774f2531b80de8b3af5d61c83a8dfd006b3ef0c37516dc830ec6fdd"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.391572 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.391627 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" event={"ID":"3d34922b-c3d5-4795-be9c-a39e2542f42d","Type":"ContainerStarted","Data":"cbd6d2d53f926d420f1b81272156ddd7fa003c0f3b58b2b414fc1a6e619b5531"}
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.393068 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" event={"ID":"5c06b85f-d6da-4e5f-a817-f01a18b0217c","Type":"ContainerStarted","Data":"757cf0ea7f8c32c62fb548594ebf288402e8284c47b2734f8fc686440851ffc6"}
Oct 03 13:07:26 crc kubenswrapper[4868]: W1003 13:07:26.401044 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod52c85570_4e33_49d4_b6df_a65727f1df56.slice/crio-c665a8f9e5d8db91e42058d7d6ecb459972a73e8efb660e8d51dedc2c99c9afa WatchSource:0}: Error finding container c665a8f9e5d8db91e42058d7d6ecb459972a73e8efb660e8d51dedc2c99c9afa: Status 404 returned error can't find the container with id c665a8f9e5d8db91e42058d7d6ecb459972a73e8efb660e8d51dedc2c99c9afa
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.550901 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.554214 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f5c7d09-45c2-42bf-b441-70fc16504141-cert\") pod \"openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5\" (UID: \"3f5c7d09-45c2-42bf-b441-70fc16504141\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.736014 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.745668 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.750966 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.776092 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz"]
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.796917 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8m2gf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-7d8bb7f44c-49swg_openstack-operators(72d193fe-4b1e-4c77-bda6-a44e1a8318b6): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.810204 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.820260 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.823721 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.828074 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.828988 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7"]
Oct 03 13:07:26 crc kubenswrapper[4868]: I1003 13:07:26.832929 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk"]
Oct 03 13:07:26 crc kubenswrapper[4868]: W1003 13:07:26.857940 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5dbc41e1_0980_4e30_9531_233266c50aca.slice/crio-584db1356e2c42af281643ce2ecadf22218aa8f9d63e07795f916db2c15464d6 WatchSource:0}: Error finding container 584db1356e2c42af281643ce2ecadf22218aa8f9d63e07795f916db2c15464d6: Status 404 returned error can't find the container with id 584db1356e2c42af281643ce2ecadf22218aa8f9d63e07795f916db2c15464d6
Oct 03 13:07:26 crc kubenswrapper[4868]: W1003 13:07:26.872197 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod97a59cbd_cfe1_49ce_9774_fc9bc76a52b0.slice/crio-02e705f5d1e33c8be4c34362efa5b52396d5f85278347f4216381e6ed371bde4 WatchSource:0}: Error finding container 02e705f5d1e33c8be4c34362efa5b52396d5f85278347f4216381e6ed371bde4: Status 404 returned error can't find the container with id 02e705f5d1e33c8be4c34362efa5b52396d5f85278347f4216381e6ed371bde4
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.872464 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4xlgr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-fcd7d9895-zhdvk_openstack-operators(70e868cf-a7e6-4942-b88a-71cbd6a992af): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.875252 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dvf8v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-5f97d8c699-69bv7_openstack-operators(97a59cbd-cfe1-49ce-9774-fc9bc76a52b0): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.875814 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hfprj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-6859f9b676-j7pqs_openstack-operators(5dbc41e1-0980-4e30-9531-233266c50aca): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.877328 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" podUID="97a59cbd-cfe1-49ce-9774-fc9bc76a52b0"
Oct 03 13:07:26 crc kubenswrapper[4868]: E1003 13:07:26.887746 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{600 -3} {} 600m DecimalSI},memory: {{2147483648 0} {} 2Gi BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{536870912 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:cert,ReadOnly:true,MountPath:/tmp/k8s-webhook-server/serving-certs,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kfdr4,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod infra-operator-controller-manager-5fbf469cd7-m4cfh_openstack-operators(6a879dee-0e96-4658-b0b2-ddfa08037b88): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.233723 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" podUID="5dbc41e1-0980-4e30-9531-233266c50aca"
Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.235641 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" podUID="72d193fe-4b1e-4c77-bda6-a44e1a8318b6"
Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.277875 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" podUID="6a879dee-0e96-4658-b0b2-ddfa08037b88"
Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.284000 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" podUID="70e868cf-a7e6-4942-b88a-71cbd6a992af"
Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.431962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" event={"ID":"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22","Type":"ContainerStarted","Data":"b298737cb055eae836e548427f27557225ad89b9a56ab65be99079c1bfd33e40"}
Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.434389 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" event={"ID":"5dbc41e1-0980-4e30-9531-233266c50aca","Type":"ContainerStarted","Data":"bd0b90bcea2d097ebdef1feacc5ed4c13238978fa771ce898e110472c59ac5e5"}
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" event={"ID":"5dbc41e1-0980-4e30-9531-233266c50aca","Type":"ContainerStarted","Data":"bd0b90bcea2d097ebdef1feacc5ed4c13238978fa771ce898e110472c59ac5e5"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.434425 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" event={"ID":"5dbc41e1-0980-4e30-9531-233266c50aca","Type":"ContainerStarted","Data":"584db1356e2c42af281643ce2ecadf22218aa8f9d63e07795f916db2c15464d6"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.436927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" event={"ID":"72d193fe-4b1e-4c77-bda6-a44e1a8318b6","Type":"ContainerStarted","Data":"481f5800426dd37d196c0f123cc6f178acad93534cfd0e567d433a6e5a0d2cba"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.436962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" event={"ID":"72d193fe-4b1e-4c77-bda6-a44e1a8318b6","Type":"ContainerStarted","Data":"63b97c5a7c1160ca6d08e37e42b53146c344e021e287f64b9ea6a4a48d351765"} Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.436982 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" podUID="5dbc41e1-0980-4e30-9531-233266c50aca" Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.438629 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" event={"ID":"546f0324-867d-4a32-a8c2-5e72d95aff3b","Type":"ContainerStarted","Data":"c3a13772024e2593407a8a09fae13b22317d3f93c065faf7de9c0e6c3121e5d1"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.441111 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" event={"ID":"4a104edd-a22b-4767-8124-0e1a0e87a999","Type":"ContainerStarted","Data":"b9502332e2e93e57f07f136eb71baa73c050c633e573cc23a8c910b6e41f2f5b"} Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.441101 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" podUID="72d193fe-4b1e-4c77-bda6-a44e1a8318b6" Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.443931 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" event={"ID":"70e868cf-a7e6-4942-b88a-71cbd6a992af","Type":"ContainerStarted","Data":"ac988ea30aebd650c15593187b9a34fa5c2ecf2b94152d565c7c09ba67167c5c"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.443981 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" 
event={"ID":"70e868cf-a7e6-4942-b88a-71cbd6a992af","Type":"ContainerStarted","Data":"b645c14aa4b28b6eccbad229a25b8bd2dd0a397e1943d7700343bc244e3c03c1"} Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.446531 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" podUID="70e868cf-a7e6-4942-b88a-71cbd6a992af" Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.454139 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" event={"ID":"10842e9e-e075-4399-88c9-96df14bf7959","Type":"ContainerStarted","Data":"2c0594cfecfb04d66b06787f53d30570dcecb426cf490a98e80b84e314a980e2"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.459948 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" event={"ID":"97a59cbd-cfe1-49ce-9774-fc9bc76a52b0","Type":"ContainerStarted","Data":"02e705f5d1e33c8be4c34362efa5b52396d5f85278347f4216381e6ed371bde4"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.468030 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5"] Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.468092 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" event={"ID":"b8907927-2a99-491c-9305-08f86cb8525d","Type":"ContainerStarted","Data":"3cbae0983cf43c0d84456f95a8c7b343fa49da26095a34b17327c91b0ed39d30"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.470165 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" event={"ID":"6a879dee-0e96-4658-b0b2-ddfa08037b88","Type":"ContainerStarted","Data":"65c82e8e36acf1fb38054aaca214cecdd7307a4db8deca1e85d2fbf09eef1c48"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.470224 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" event={"ID":"6a879dee-0e96-4658-b0b2-ddfa08037b88","Type":"ContainerStarted","Data":"f0594a213023131577c906e2a7714443c7f207feea9abd5d2702dda4f67086b5"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.478548 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" event={"ID":"1f467387-a3f8-4b5b-af79-14eaf2bf799a","Type":"ContainerStarted","Data":"e83c6de814e515e1d9f649feba86db07410faaf25c17a34fe5b0ad34d5e10c4f"} Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.479310 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" podUID="97a59cbd-cfe1-49ce-9774-fc9bc76a52b0" Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.499849 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" event={"ID":"52c85570-4e33-49d4-b6df-a65727f1df56","Type":"ContainerStarted","Data":"c665a8f9e5d8db91e42058d7d6ecb459972a73e8efb660e8d51dedc2c99c9afa"} Oct 03 13:07:27 crc kubenswrapper[4868]: E1003 13:07:27.513672 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" podUID="6a879dee-0e96-4658-b0b2-ddfa08037b88" Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.527600 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" event={"ID":"89d66ba3-fda2-467a-a2f2-402a8661155b","Type":"ContainerStarted","Data":"2173376712a0195a8da21673d63ef233db07e79e62c33c5f3a208d0e2b494e4b"} Oct 03 13:07:27 crc kubenswrapper[4868]: I1003 13:07:27.527636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" event={"ID":"89d66ba3-fda2-467a-a2f2-402a8661155b","Type":"ContainerStarted","Data":"1bbac4c2e795c1b4a5755108306e0a41526a75c57fcf88ad1d5166ad623b71dc"} Oct 03 13:07:28 crc kubenswrapper[4868]: I1003 13:07:28.536940 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" event={"ID":"3f5c7d09-45c2-42bf-b441-70fc16504141","Type":"ContainerStarted","Data":"bbc3aa1f6364da221e80d49b872727f6cb817d52836e3243533cb4afb2e53daf"} Oct 03 13:07:28 crc kubenswrapper[4868]: I1003 13:07:28.541970 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" event={"ID":"89d66ba3-fda2-467a-a2f2-402a8661155b","Type":"ContainerStarted","Data":"8a814dee26d9cc3cf607bffff4543f1a4bd4020a4bb61ad04d323fa842eebcc7"} Oct 03 13:07:28 crc kubenswrapper[4868]: I1003 13:07:28.542023 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" Oct 03 13:07:28 crc kubenswrapper[4868]: E1003 13:07:28.549741 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/infra-operator@sha256:40fb1819b6639807b77ef79448d35f1e4bfc1838a09d4f380e9fa0f755352475\\\"\"" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" podUID="6a879dee-0e96-4658-b0b2-ddfa08037b88" Oct 03 13:07:28 crc kubenswrapper[4868]: E1003 13:07:28.550175 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" podUID="97a59cbd-cfe1-49ce-9774-fc9bc76a52b0" Oct 03 13:07:28 crc kubenswrapper[4868]: E1003 13:07:28.550224 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/placement-operator@sha256:725da67b3f9cf2758564e0111928cdd570c0f6f1ca34775f159bbe94deb82548\\\"\"" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" podUID="72d193fe-4b1e-4c77-bda6-a44e1a8318b6" Oct 03 13:07:28 crc kubenswrapper[4868]: E1003 13:07:28.550743 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/watcher-operator@sha256:018151bd5ff830ec03c6b8e3d53cfb9456ca6e1e34793bdd4f7edd39a0146fa6\\\"\"" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" podUID="70e868cf-a7e6-4942-b88a-71cbd6a992af" Oct 03 13:07:28 crc kubenswrapper[4868]: E1003 13:07:28.552235 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:637bb7b9ac308bc1e323391a3593b824f688090a856c83385814c17a571b1eed\\\"\"" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" podUID="5dbc41e1-0980-4e30-9531-233266c50aca" Oct 03 13:07:28 crc kubenswrapper[4868]: I1003 13:07:28.681868 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" podStartSLOduration=4.681851556 podStartE2EDuration="4.681851556s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:07:28.681340453 +0000 UTC m=+1044.891189549" watchObservedRunningTime="2025-10-03 13:07:28.681851556 +0000 UTC m=+1044.891700622" Oct 03 13:07:35 crc kubenswrapper[4868]: I1003 13:07:35.662492 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-5c4446bf96-6xvjc" Oct 03 13:07:39 crc kubenswrapper[4868]: E1003 13:07:39.151317 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b" Oct 03 13:07:39 crc kubenswrapper[4868]: E1003 13:07:39.151866 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b,Command:[/manager],Args:[--health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080 --leader-elect],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zrdw2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-79d68d6c85-6pz2t_openstack-operators(9efa809d-5837-4900-a456-84edfb2ba501): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:07:39 crc kubenswrapper[4868]: E1003 13:07:39.347705 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" podUID="9efa809d-5837-4900-a456-84edfb2ba501" Oct 03 13:07:39 crc kubenswrapper[4868]: I1003 13:07:39.636788 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" event={"ID":"f125746c-8bbb-499e-95e0-2dd8071d914e","Type":"ContainerStarted","Data":"12ab50affcfbc984bc66b1c1f609aa001a652f0fb1cc778d6a63dd848fe722d1"} Oct 03 13:07:39 crc kubenswrapper[4868]: I1003 13:07:39.645542 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" event={"ID":"10842e9e-e075-4399-88c9-96df14bf7959","Type":"ContainerStarted","Data":"cc7aab9562f90391b1f2b52683f5cb05df507208728b647ef69292c09f4803ac"} Oct 03 13:07:39 crc kubenswrapper[4868]: I1003 13:07:39.648988 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" event={"ID":"3d34922b-c3d5-4795-be9c-a39e2542f42d","Type":"ContainerStarted","Data":"c61afbc1f5e3ec256912e14db4676621533c1cb44645d2710a0c302d639424ed"} Oct 03 13:07:39 crc kubenswrapper[4868]: I1003 13:07:39.652516 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" event={"ID":"9efa809d-5837-4900-a456-84edfb2ba501","Type":"ContainerStarted","Data":"151a09d6768e63f48a2704efb7e965fae02fea37ea6e3395bcd55f90e5979715"} Oct 03 13:07:39 crc kubenswrapper[4868]: E1003 13:07:39.654481 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" 
podUID="9efa809d-5837-4900-a456-84edfb2ba501" Oct 03 13:07:39 crc kubenswrapper[4868]: I1003 13:07:39.656126 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" event={"ID":"5c06b85f-d6da-4e5f-a817-f01a18b0217c","Type":"ContainerStarted","Data":"98e03bbb20da6b2ff26a723a70bd557038fda68d3f9512013d3e0bb5b22e99d1"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.665545 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" event={"ID":"a9341a61-ad61-4ab0-8056-fea9a2e0644e","Type":"ContainerStarted","Data":"ba39eb9760ad939a421e3b8aa5ea715b1d0fd3f8f6b695c84200805cc8ab9154"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.667146 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" event={"ID":"b8907927-2a99-491c-9305-08f86cb8525d","Type":"ContainerStarted","Data":"1c0da0e80873507faac325b8e485028a1d0bbdb1c93b5e93b76463516a813d7e"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.669318 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" event={"ID":"3f5c7d09-45c2-42bf-b441-70fc16504141","Type":"ContainerStarted","Data":"cc4f9c9c6be87ef6a6f28ca33dc9571405c170024b0d9651bf9536807712830f"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.670923 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" event={"ID":"1121fe0b-eb0d-43f2-b503-85a3a3601c7e","Type":"ContainerStarted","Data":"fde548203ae36d24fb16bf225aed2a816b013b045d79688bb36b77564b9cc49b"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.672629 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" event={"ID":"546f0324-867d-4a32-a8c2-5e72d95aff3b","Type":"ContainerStarted","Data":"4a0cdaa5e96088f615325102ea45582165e09dc7a3057966dd3694a7fbef2831"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.691170 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" event={"ID":"ac74b0fc-4221-46e6-b88a-f9bd4a484952","Type":"ContainerStarted","Data":"6ad1ab6bff415e36a4fcacfcff71073554bee854b877268282aad5a892d0f36b"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.696159 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" event={"ID":"52c85570-4e33-49d4-b6df-a65727f1df56","Type":"ContainerStarted","Data":"52b70675ad134071ebd3898eb891ae123347d27f19dd3d3809164d6e1e329ea9"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.697621 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" event={"ID":"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22","Type":"ContainerStarted","Data":"0a720ba19b8db7f5631cbd22bfe7ecd9420a3d007972069e647beac0fd6cff1e"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.703297 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" event={"ID":"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f","Type":"ContainerStarted","Data":"3993975e61fe5acc0917fcb5e9cb16c92706d7ba5ed03acf8d6fa489aa94981e"} Oct 03 13:07:40 
crc kubenswrapper[4868]: I1003 13:07:40.705495 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" event={"ID":"96f01adb-73f9-45c4-bf04-677ffa2942e2","Type":"ContainerStarted","Data":"111f5b1ad9fb12534535e0e7a7857d7a302c4917e884ad0d6b72a956b6c073cd"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.721894 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" event={"ID":"1f467387-a3f8-4b5b-af79-14eaf2bf799a","Type":"ContainerStarted","Data":"69ff8a377c299652e97c03c99fe3267a8e3bb1fa8c0b7e838ab9f13bca12a0d3"} Oct 03 13:07:40 crc kubenswrapper[4868]: I1003 13:07:40.724175 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" event={"ID":"4a104edd-a22b-4767-8124-0e1a0e87a999","Type":"ContainerStarted","Data":"3dde28554c91b9182c608391327567477a4a0c673ac27b7f07c87c15168dee75"} Oct 03 13:07:40 crc kubenswrapper[4868]: E1003 13:07:40.725409 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/cinder-operator@sha256:229213522e05cbd3034bb80a8ddb1c701cf5f6d74c696e8085597ef6da27ca4b\\\"\"" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" podUID="9efa809d-5837-4900-a456-84edfb2ba501" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.732601 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" event={"ID":"f125746c-8bbb-499e-95e0-2dd8071d914e","Type":"ContainerStarted","Data":"6dcf4db5c992cdac9e0d62e098f186dc5c6c047c108c54b00cbb891f8b133f97"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.732949 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.735514 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" event={"ID":"a9341a61-ad61-4ab0-8056-fea9a2e0644e","Type":"ContainerStarted","Data":"ea691ce7fe6efc104b06fdc0164a7e75558b6c9b3e161bba98dbea93c4369711"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.735540 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.737585 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" event={"ID":"1121fe0b-eb0d-43f2-b503-85a3a3601c7e","Type":"ContainerStarted","Data":"4b2e54ae051d7ce224f29bd199e8e1bc32e7b9a19b0d1f332e4624101cb2ac57"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.737971 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.740128 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" event={"ID":"ac74b0fc-4221-46e6-b88a-f9bd4a484952","Type":"ContainerStarted","Data":"12e06ac9cda14df90371f7b5214b09621bb41013f74a5f6cd0dc7a8cc18d69d5"} Oct 03 13:07:41 crc 
kubenswrapper[4868]: I1003 13:07:41.740499 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.742030 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" event={"ID":"1f467387-a3f8-4b5b-af79-14eaf2bf799a","Type":"ContainerStarted","Data":"90a82c62559d6ff15d2db920b9499ff8c2135388227f2129239efa3e2b6bba63"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.742399 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.743756 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" event={"ID":"5c06b85f-d6da-4e5f-a817-f01a18b0217c","Type":"ContainerStarted","Data":"592eb077e8f1aa44670c80c3e62544ba012ac37a8abb129e85b680ce7a9d7b7a"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.744107 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.745777 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" event={"ID":"52c85570-4e33-49d4-b6df-a65727f1df56","Type":"ContainerStarted","Data":"b313fefb5f9f267a7d54ea8f2516ca9ea1e420a03851e0ec7b80ff2ef2c16df3"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.746166 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.748152 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" event={"ID":"3f5c7d09-45c2-42bf-b441-70fc16504141","Type":"ContainerStarted","Data":"b4c12452c306e5bde6ba63b375d987b9ae0ea405a1a416faf6d1f7946f3fd106"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.748504 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.750403 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" event={"ID":"546f0324-867d-4a32-a8c2-5e72d95aff3b","Type":"ContainerStarted","Data":"752a3f5c4fe2310ccfd62c403cc5df32430268572ea6a5acc62b27b134f39863"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.750748 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.755318 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" event={"ID":"4a104edd-a22b-4767-8124-0e1a0e87a999","Type":"ContainerStarted","Data":"d676bcde9131cf234d4558f466fd4d1a18fee598c85b4055b13dacd8d3e3ad0d"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.755561 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" 
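
[annotation] The Container dumps earlier in this log (for example the swift-operator and infra-operator specs) all declare the same /healthz and /readyz endpoints on the --health-probe-bind-address port 8081 that drive the "SyncLoop (probe)" readiness transitions recorded above. A minimal sketch of that probe block using client-go types, with every numeric value copied from the logged specs; the package and function names are illustrative, not taken from the operators' source:

    // Probe block reconstructed from the Container specs logged above; all
    // values are copied verbatim from those dumps. Package and function
    // names are illustrative only.
    package probespec

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    // managerProbes returns the liveness (/healthz) and readiness (/readyz)
    // probes that the operator manager containers in this log declare on
    // their health-probe bind address, :8081.
    func managerProbes() (liveness, readiness *corev1.Probe) {
        liveness = &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{
                    Path:   "/healthz",
                    Port:   intstr.FromInt32(8081),
                    Scheme: corev1.URISchemeHTTP,
                },
            },
            InitialDelaySeconds: 15, // as logged: InitialDelaySeconds:15
            TimeoutSeconds:      1,
            PeriodSeconds:       20,
            SuccessThreshold:    1,
            FailureThreshold:    3,
        }
        readiness = &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{
                    Path:   "/readyz",
                    Port:   intstr.FromInt32(8081),
                    Scheme: corev1.URISchemeHTTP,
                },
            },
            InitialDelaySeconds: 5, // readiness starts sooner and polls faster
            TimeoutSeconds:      1,
            PeriodSeconds:       10,
            SuccessThreshold:    1,
            FailureThreshold:    3,
        }
        return liveness, readiness
    }

The "SyncLoop (probe)" events with status="" above appear to be the kubelet recording a readiness state before the first successful /readyz response; the status="ready" lines that follow mark the transition.
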
Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.757573 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" event={"ID":"756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22","Type":"ContainerStarted","Data":"37e1e0a7660b5d91b1ddd97c0af0a487fcee95ce353bd876b6c44705f7cb1a8e"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.757661 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.759582 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" event={"ID":"10842e9e-e075-4399-88c9-96df14bf7959","Type":"ContainerStarted","Data":"7e70226d9a2b14fb17ea04dcbac5ab79efda4bac4c717969e2af5d46641a7583"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.759967 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.764501 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" podStartSLOduration=4.833377513 podStartE2EDuration="17.764482684s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.277363523 +0000 UTC m=+1042.487212589" lastFinishedPulling="2025-10-03 13:07:39.208468694 +0000 UTC m=+1055.418317760" observedRunningTime="2025-10-03 13:07:41.76022555 +0000 UTC m=+1057.970074616" watchObservedRunningTime="2025-10-03 13:07:41.764482684 +0000 UTC m=+1057.974331750" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.770601 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" event={"ID":"b8907927-2a99-491c-9305-08f86cb8525d","Type":"ContainerStarted","Data":"0e75b685bc2fa38a39de56df186647a454ab4e01b3b98fe5008c416177da9fb8"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.770790 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.772990 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" event={"ID":"96f01adb-73f9-45c4-bf04-677ffa2942e2","Type":"ContainerStarted","Data":"726a6e6036395fe9f0f442f82bbf6e7cfe1e4051770272c8d458eb72b1ec5d47"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.773239 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.778239 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" event={"ID":"ed9b7dc9-9145-42db-bed4-c4cf3f22c07f","Type":"ContainerStarted","Data":"9f1bef7ac4354d2a0e7f8e4945c03a0c72b7540b08f2b8e81ac7061fbed7cb5c"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.778407 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.780720 4868 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" event={"ID":"3d34922b-c3d5-4795-be9c-a39e2542f42d","Type":"ContainerStarted","Data":"9c2913d5109448166a5c26115672c52795bb649df9e201b294bb1261d446b915"} Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.781314 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.797023 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" podStartSLOduration=4.100331999 podStartE2EDuration="17.797004207s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.513268156 +0000 UTC m=+1041.723117222" lastFinishedPulling="2025-10-03 13:07:39.209940364 +0000 UTC m=+1055.419789430" observedRunningTime="2025-10-03 13:07:41.793899113 +0000 UTC m=+1058.003748179" watchObservedRunningTime="2025-10-03 13:07:41.797004207 +0000 UTC m=+1058.006853263" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.821340 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" podStartSLOduration=5.335764007 podStartE2EDuration="17.82132373s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.769037499 +0000 UTC m=+1042.978886565" lastFinishedPulling="2025-10-03 13:07:39.254597222 +0000 UTC m=+1055.464446288" observedRunningTime="2025-10-03 13:07:41.817946009 +0000 UTC m=+1058.027795085" watchObservedRunningTime="2025-10-03 13:07:41.82132373 +0000 UTC m=+1058.031172796" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.841511 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" podStartSLOduration=5.009371206 podStartE2EDuration="17.841490911s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.428255643 +0000 UTC m=+1042.638104709" lastFinishedPulling="2025-10-03 13:07:39.260375348 +0000 UTC m=+1055.470224414" observedRunningTime="2025-10-03 13:07:41.83774298 +0000 UTC m=+1058.047592046" watchObservedRunningTime="2025-10-03 13:07:41.841490911 +0000 UTC m=+1058.051339977" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.860277 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" podStartSLOduration=5.027921804 podStartE2EDuration="17.860253584s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.428297124 +0000 UTC m=+1042.638146190" lastFinishedPulling="2025-10-03 13:07:39.260628894 +0000 UTC m=+1055.470477970" observedRunningTime="2025-10-03 13:07:41.856418982 +0000 UTC m=+1058.066268038" watchObservedRunningTime="2025-10-03 13:07:41.860253584 +0000 UTC m=+1058.070102660" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.886444 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" podStartSLOduration=5.028405448 podStartE2EDuration="17.886426427s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.403471678 +0000 UTC m=+1042.613320744" 
lastFinishedPulling="2025-10-03 13:07:39.261492657 +0000 UTC m=+1055.471341723" observedRunningTime="2025-10-03 13:07:41.88281876 +0000 UTC m=+1058.092667816" watchObservedRunningTime="2025-10-03 13:07:41.886426427 +0000 UTC m=+1058.096275483" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.918509 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" podStartSLOduration=4.929484392 podStartE2EDuration="17.918493197s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.265465014 +0000 UTC m=+1042.475314080" lastFinishedPulling="2025-10-03 13:07:39.254473819 +0000 UTC m=+1055.464322885" observedRunningTime="2025-10-03 13:07:41.917360507 +0000 UTC m=+1058.127209593" watchObservedRunningTime="2025-10-03 13:07:41.918493197 +0000 UTC m=+1058.128342263" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.943031 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" podStartSLOduration=4.605608251 podStartE2EDuration="17.943011196s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.924244326 +0000 UTC m=+1042.134093392" lastFinishedPulling="2025-10-03 13:07:39.261647271 +0000 UTC m=+1055.471496337" observedRunningTime="2025-10-03 13:07:41.934649721 +0000 UTC m=+1058.144498787" watchObservedRunningTime="2025-10-03 13:07:41.943011196 +0000 UTC m=+1058.152860262" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.966884 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" podStartSLOduration=6.201230584 podStartE2EDuration="17.966866566s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:27.513117039 +0000 UTC m=+1043.722966105" lastFinishedPulling="2025-10-03 13:07:39.278753031 +0000 UTC m=+1055.488602087" observedRunningTime="2025-10-03 13:07:41.963084944 +0000 UTC m=+1058.172934010" watchObservedRunningTime="2025-10-03 13:07:41.966866566 +0000 UTC m=+1058.176715632" Oct 03 13:07:41 crc kubenswrapper[4868]: I1003 13:07:41.986495 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" podStartSLOduration=4.39657747 podStartE2EDuration="17.986480142s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.671216575 +0000 UTC m=+1041.881065641" lastFinishedPulling="2025-10-03 13:07:39.261119247 +0000 UTC m=+1055.470968313" observedRunningTime="2025-10-03 13:07:41.981393445 +0000 UTC m=+1058.191242501" watchObservedRunningTime="2025-10-03 13:07:41.986480142 +0000 UTC m=+1058.196329208" Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.008123 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" podStartSLOduration=5.170888911 podStartE2EDuration="18.008106692s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.423176127 +0000 UTC m=+1042.633025203" lastFinishedPulling="2025-10-03 13:07:39.260393918 +0000 UTC m=+1055.470242984" observedRunningTime="2025-10-03 13:07:42.004170937 +0000 UTC m=+1058.214020023" watchObservedRunningTime="2025-10-03 13:07:42.008106692 +0000 UTC m=+1058.217955758" 
Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.027490 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" podStartSLOduration=5.538684572 podStartE2EDuration="18.027472562s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.772290376 +0000 UTC m=+1042.982139442" lastFinishedPulling="2025-10-03 13:07:39.261078366 +0000 UTC m=+1055.470927432" observedRunningTime="2025-10-03 13:07:42.027468992 +0000 UTC m=+1058.237318058" watchObservedRunningTime="2025-10-03 13:07:42.027472562 +0000 UTC m=+1058.237321628" Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.047995 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" podStartSLOduration=4.3354527990000005 podStartE2EDuration="18.047973672s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.513572204 +0000 UTC m=+1041.723421270" lastFinishedPulling="2025-10-03 13:07:39.226093077 +0000 UTC m=+1055.435942143" observedRunningTime="2025-10-03 13:07:42.04677045 +0000 UTC m=+1058.256619536" watchObservedRunningTime="2025-10-03 13:07:42.047973672 +0000 UTC m=+1058.257822758" Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.073386 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" podStartSLOduration=4.687628181 podStartE2EDuration="18.073367144s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.87299083 +0000 UTC m=+1042.082839896" lastFinishedPulling="2025-10-03 13:07:39.258729793 +0000 UTC m=+1055.468578859" observedRunningTime="2025-10-03 13:07:42.070229869 +0000 UTC m=+1058.280078935" watchObservedRunningTime="2025-10-03 13:07:42.073367144 +0000 UTC m=+1058.283216210" Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.096417 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" podStartSLOduration=4.878653188 podStartE2EDuration="18.096395382s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.036699154 +0000 UTC m=+1042.246548220" lastFinishedPulling="2025-10-03 13:07:39.254441348 +0000 UTC m=+1055.464290414" observedRunningTime="2025-10-03 13:07:42.090163675 +0000 UTC m=+1058.300012741" watchObservedRunningTime="2025-10-03 13:07:42.096395382 +0000 UTC m=+1058.306244448" Oct 03 13:07:42 crc kubenswrapper[4868]: I1003 13:07:42.117346 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" podStartSLOduration=5.625298767 podStartE2EDuration="18.117322684s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.768468504 +0000 UTC m=+1042.978317570" lastFinishedPulling="2025-10-03 13:07:39.260492421 +0000 UTC m=+1055.470341487" observedRunningTime="2025-10-03 13:07:42.11237047 +0000 UTC m=+1058.322219536" watchObservedRunningTime="2025-10-03 13:07:42.117322684 +0000 UTC m=+1058.327171740" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.517653 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-6c675fb79f-82mnx" Oct 03 13:07:44 crc 
kubenswrapper[4868]: I1003 13:07:44.565677 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-75dfd9b554-nmp74" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.655501 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-6769b867d9-8dh82" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.777824 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-84bc9db6cc-lsdv9" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.807366 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" event={"ID":"97a59cbd-cfe1-49ce-9774-fc9bc76a52b0","Type":"ContainerStarted","Data":"4ad5607a7fc1c3c761c975d51994892fd0cd4482da137da97dd36544b945bd6c"} Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.812281 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7f55849f88-mql26" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.819570 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" event={"ID":"5dbc41e1-0980-4e30-9531-233266c50aca","Type":"ContainerStarted","Data":"30f3ca948785baa1f22a4c574ffc51084b308fd4e307211e34fdef562c0d7ebb"} Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.820563 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.834648 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-5f97d8c699-69bv7" podStartSLOduration=2.334797574 podStartE2EDuration="19.834624582s" podCreationTimestamp="2025-10-03 13:07:25 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.875117986 +0000 UTC m=+1043.084967052" lastFinishedPulling="2025-10-03 13:07:44.374944994 +0000 UTC m=+1060.584794060" observedRunningTime="2025-10-03 13:07:44.82523226 +0000 UTC m=+1061.035081326" watchObservedRunningTime="2025-10-03 13:07:44.834624582 +0000 UTC m=+1061.044473648" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.870986 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" podStartSLOduration=3.372686769 podStartE2EDuration="20.870963497s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.875731172 +0000 UTC m=+1043.085580238" lastFinishedPulling="2025-10-03 13:07:44.3740079 +0000 UTC m=+1060.583856966" observedRunningTime="2025-10-03 13:07:44.864799261 +0000 UTC m=+1061.074648337" watchObservedRunningTime="2025-10-03 13:07:44.870963497 +0000 UTC m=+1061.080812573" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.914731 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-6fd6854b49-fvf5f" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.955287 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-6574bf987d-t7vpl" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 
13:07:44.962141 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-555c7456bd-jkjfz" Oct 03 13:07:44 crc kubenswrapper[4868]: I1003 13:07:44.985792 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-59d6cfdf45-8j6dq" Oct 03 13:07:45 crc kubenswrapper[4868]: I1003 13:07:45.303192 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-688db7b6c7-wbsqt" Oct 03 13:07:45 crc kubenswrapper[4868]: I1003 13:07:45.321303 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-5db5cf686f-zlj2b" Oct 03 13:07:45 crc kubenswrapper[4868]: I1003 13:07:45.398775 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5cd5cb47d7-zftwb" Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.833012 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5" Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.835859 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" event={"ID":"72d193fe-4b1e-4c77-bda6-a44e1a8318b6","Type":"ContainerStarted","Data":"af9c76e353c18c2c79cffca52e3b88aa004e5954bff51f55db040fe0fa741672"} Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.836189 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.837896 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" event={"ID":"70e868cf-a7e6-4942-b88a-71cbd6a992af","Type":"ContainerStarted","Data":"3f6fec90e9934a77a7cc05fd89d72484f63b2f30b7725fbe128328b0de97cc59"} Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.838082 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" Oct 03 13:07:46 crc kubenswrapper[4868]: I1003 13:07:46.976856 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" podStartSLOduration=3.727294578 podStartE2EDuration="22.976836816s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.872322161 +0000 UTC m=+1043.082171227" lastFinishedPulling="2025-10-03 13:07:46.121864399 +0000 UTC m=+1062.331713465" observedRunningTime="2025-10-03 13:07:46.946669216 +0000 UTC m=+1063.156518282" watchObservedRunningTime="2025-10-03 13:07:46.976836816 +0000 UTC m=+1063.186685882" Oct 03 13:07:47 crc kubenswrapper[4868]: I1003 13:07:47.845918 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" event={"ID":"6a879dee-0e96-4658-b0b2-ddfa08037b88","Type":"ContainerStarted","Data":"776dc6c07e949a6fa1665aa690c3a816c4ef63e167c01119b5d311593c8d09cb"} Oct 03 13:07:47 crc kubenswrapper[4868]: I1003 13:07:47.846440 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:07:47 crc kubenswrapper[4868]: I1003 13:07:47.866475 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" podStartSLOduration=4.033117015 podStartE2EDuration="23.866448571s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.887568411 +0000 UTC m=+1043.097417487" lastFinishedPulling="2025-10-03 13:07:46.720899977 +0000 UTC m=+1062.930749043" observedRunningTime="2025-10-03 13:07:47.863293037 +0000 UTC m=+1064.073142113" watchObservedRunningTime="2025-10-03 13:07:47.866448571 +0000 UTC m=+1064.076297647" Oct 03 13:07:47 crc kubenswrapper[4868]: I1003 13:07:47.869096 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" podStartSLOduration=4.544701205 podStartE2EDuration="23.869043831s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:26.796725512 +0000 UTC m=+1043.006574578" lastFinishedPulling="2025-10-03 13:07:46.121068138 +0000 UTC m=+1062.330917204" observedRunningTime="2025-10-03 13:07:46.981791798 +0000 UTC m=+1063.191640864" watchObservedRunningTime="2025-10-03 13:07:47.869043831 +0000 UTC m=+1064.078892897" Oct 03 13:07:53 crc kubenswrapper[4868]: I1003 13:07:53.886594 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" event={"ID":"9efa809d-5837-4900-a456-84edfb2ba501","Type":"ContainerStarted","Data":"650faaed1f37307141e514e793cb5ba0a226379c49fee900773055ee29f99d60"} Oct 03 13:07:53 crc kubenswrapper[4868]: I1003 13:07:53.887297 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:07:53 crc kubenswrapper[4868]: I1003 13:07:53.908355 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" podStartSLOduration=2.64496154 podStartE2EDuration="29.908331676s" podCreationTimestamp="2025-10-03 13:07:24 +0000 UTC" firstStartedPulling="2025-10-03 13:07:25.694366137 +0000 UTC m=+1041.904215203" lastFinishedPulling="2025-10-03 13:07:52.957736273 +0000 UTC m=+1069.167585339" observedRunningTime="2025-10-03 13:07:53.907311989 +0000 UTC m=+1070.117161055" watchObservedRunningTime="2025-10-03 13:07:53.908331676 +0000 UTC m=+1070.118180742" Oct 03 13:07:54 crc kubenswrapper[4868]: I1003 13:07:54.591723 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-846dff85b5-txz24" Oct 03 13:07:54 crc kubenswrapper[4868]: I1003 13:07:54.610874 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-599898f689-r8szh" Oct 03 13:07:54 crc kubenswrapper[4868]: I1003 13:07:54.948599 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-5c468bf4d4-lsvj8" Oct 03 13:07:55 crc kubenswrapper[4868]: I1003 13:07:55.067712 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-7d8bb7f44c-49swg" Oct 03 13:07:55 crc kubenswrapper[4868]: I1003 13:07:55.092835 4868 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-6859f9b676-j7pqs" Oct 03 13:07:55 crc kubenswrapper[4868]: I1003 13:07:55.574263 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-fcd7d9895-zhdvk" Oct 03 13:07:56 crc kubenswrapper[4868]: I1003 13:07:56.282290 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-5fbf469cd7-m4cfh" Oct 03 13:08:04 crc kubenswrapper[4868]: I1003 13:08:04.518423 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-79d68d6c85-6pz2t" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.299235 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.301449 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.303711 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.303988 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.304162 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-n7c2w" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.305663 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.322276 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.368947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.369012 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp922\" (UniqueName: \"kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.436796 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.438015 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.441983 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.449887 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.470033 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.470123 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp922\" (UniqueName: \"kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.471178 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.505809 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp922\" (UniqueName: \"kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922\") pod \"dnsmasq-dns-675f4bcbfc-d8xhw\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") " pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.573607 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d2g6n\" (UniqueName: \"kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.573694 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.574038 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.621336 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.675633 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.676549 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.676623 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.676769 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d2g6n\" (UniqueName: \"kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.677630 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.696822 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d2g6n\" (UniqueName: \"kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n\") pod \"dnsmasq-dns-78dd6ddcc-x99j6\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") " pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:22 crc kubenswrapper[4868]: I1003 13:08:22.756200 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:23 crc kubenswrapper[4868]: I1003 13:08:23.115623 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:23 crc kubenswrapper[4868]: I1003 13:08:23.125856 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:08:23 crc kubenswrapper[4868]: I1003 13:08:23.181784 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:23 crc kubenswrapper[4868]: W1003 13:08:23.183808 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd8085517_4ee6_470e_b3f6_4e993c096a8f.slice/crio-74bdd44a531bad0b083a34a986f0f4bbe210852e530dfb438dd113fadff2a912 WatchSource:0}: Error finding container 74bdd44a531bad0b083a34a986f0f4bbe210852e530dfb438dd113fadff2a912: Status 404 returned error can't find the container with id 74bdd44a531bad0b083a34a986f0f4bbe210852e530dfb438dd113fadff2a912 Oct 03 13:08:24 crc kubenswrapper[4868]: I1003 13:08:24.085670 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" event={"ID":"d8085517-4ee6-470e-b3f6-4e993c096a8f","Type":"ContainerStarted","Data":"74bdd44a531bad0b083a34a986f0f4bbe210852e530dfb438dd113fadff2a912"} Oct 03 13:08:24 crc kubenswrapper[4868]: I1003 13:08:24.087626 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" event={"ID":"c6c5e289-55a0-4970-b10f-1ac2fbd49039","Type":"ContainerStarted","Data":"f9c26d7d55fd7ea35ae4209b74e2543888e43f74824c2cd0230f1fab5eed476b"} Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.267404 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.286036 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.288331 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.298695 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.446873 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.446973 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.447016 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkpdc\" (UniqueName: \"kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.535768 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.548173 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.548218 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.548260 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkpdc\" (UniqueName: \"kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.549314 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.549450 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.559918 
4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.561564 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.572615 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"] Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.584239 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkpdc\" (UniqueName: \"kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc\") pod \"dnsmasq-dns-666b6646f7-6d6hl\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") " pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.651450 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.651512 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.651588 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2xln\" (UniqueName: \"kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.672327 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.753115 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.753172 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.753208 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2xln\" (UniqueName: \"kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.754180 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.754243 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.781875 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2xln\" (UniqueName: \"kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln\") pod \"dnsmasq-dns-57d769cc4f-qpj9d\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") " pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:25 crc kubenswrapper[4868]: I1003 13:08:25.929820 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.188584 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"] Oct 03 13:08:26 crc kubenswrapper[4868]: W1003 13:08:26.199916 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5b33b7b6_6204_48fa_9dea_cf7d3127cd79.slice/crio-061ca7ef95b6e028055c84b7d0f0f90070f6ec2388175f2b70ff24b64a2dff55 WatchSource:0}: Error finding container 061ca7ef95b6e028055c84b7d0f0f90070f6ec2388175f2b70ff24b64a2dff55: Status 404 returned error can't find the container with id 061ca7ef95b6e028055c84b7d0f0f90070f6ec2388175f2b70ff24b64a2dff55 Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.404189 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.405800 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.411525 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.411938 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.413370 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.413652 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.414318 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.414319 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.414326 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rxtgr" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.418794 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"] Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.427032 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573178 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573285 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573315 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573342 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573364 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573383 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573437 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573481 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573507 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573539 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hdcmq\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.573566 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.668191 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.669632 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.673705 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.674376 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.674650 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.674847 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.675084 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.675275 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-t6qw8" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.675667 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677268 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677338 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hdcmq\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677373 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677422 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677447 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677473 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677499 4868 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677519 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677539 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677598 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.677657 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.685349 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.689235 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.696734 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.698484 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.699280 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " 
pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.700783 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.700596 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.705903 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.705990 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.714150 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.716322 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hdcmq\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.721332 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.742347 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " pod="openstack/rabbitmq-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778534 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778594 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc 
kubenswrapper[4868]: I1003 13:08:26.778634 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778721 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778767 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778806 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778859 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.778991 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.779120 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.779194 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqcxf\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.779378 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 
13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881691 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881754 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881813 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881847 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881915 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqcxf\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.881980 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882033 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882080 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882123 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882151 4868 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882181 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882257 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.882851 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.883087 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.883139 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.883836 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.884321 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.887856 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.892347 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: 
\"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.897769 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.900926 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqcxf\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.904372 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:26 crc kubenswrapper[4868]: I1003 13:08:26.907122 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:27 crc kubenswrapper[4868]: I1003 13:08:27.033237 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:08:27 crc kubenswrapper[4868]: I1003 13:08:27.092278 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:08:27 crc kubenswrapper[4868]: I1003 13:08:27.142241 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" event={"ID":"5b33b7b6-6204-48fa-9dea-cf7d3127cd79","Type":"ContainerStarted","Data":"061ca7ef95b6e028055c84b7d0f0f90070f6ec2388175f2b70ff24b64a2dff55"} Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.463818 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.466608 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.471496 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.472087 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.472520 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.474011 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-jmpsz" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.474474 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.501690 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.503467 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.524233 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.526166 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532252 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532328 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532393 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532438 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532466 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wkmhk\" (UniqueName: \"kubernetes.io/projected/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kube-api-access-wkmhk\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 
13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532489 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532535 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532575 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532598 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532765 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.532945 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-wrlxx" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.533121 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.533249 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.551522 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.633870 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.635479 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-secrets\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.635616 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " 
pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.635742 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.635840 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.635943 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-default\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636102 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636365 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636479 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636584 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-kolla-config\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636685 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.636891 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkmhk\" (UniqueName: \"kubernetes.io/projected/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kube-api-access-wkmhk\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 
13:08:29.636995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.637115 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-plvtb\" (UniqueName: \"kubernetes.io/projected/7a386b95-6440-43fb-88c4-9e48c2277ca5-kube-api-access-plvtb\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.637377 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.637168 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.637862 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.638315 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.637392 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.638940 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.639072 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.639191 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.644177 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/b28a570d-3c2f-43c5-8be6-908d8ecabb08-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.692323 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.693799 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.694714 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-secrets\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.705961 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/b28a570d-3c2f-43c5-8be6-908d8ecabb08-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.713228 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkmhk\" (UniqueName: \"kubernetes.io/projected/b28a570d-3c2f-43c5-8be6-908d8ecabb08-kube-api-access-wkmhk\") pod \"openstack-cell1-galera-0\" (UID: \"b28a570d-3c2f-43c5-8be6-908d8ecabb08\") " pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.743774 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-kolla-config\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.743876 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-plvtb\" (UniqueName: \"kubernetes.io/projected/7a386b95-6440-43fb-88c4-9e48c2277ca5-kube-api-access-plvtb\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.743960 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-secrets\") pod \"openstack-galera-0\" (UID: 
\"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.743991 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744017 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744032 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744152 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-default\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744196 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744230 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.744934 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-kolla-config\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.745388 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.745648 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-generated\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.745407 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-config-data-default\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.746439 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/7a386b95-6440-43fb-88c4-9e48c2277ca5-operator-scripts\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.756381 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.756465 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.766243 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-plvtb\" (UniqueName: \"kubernetes.io/projected/7a386b95-6440-43fb-88c4-9e48c2277ca5-kube-api-access-plvtb\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.774740 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secrets\" (UniqueName: \"kubernetes.io/secret/7a386b95-6440-43fb-88c4-9e48c2277ca5-secrets\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.782601 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"7a386b95-6440-43fb-88c4-9e48c2277ca5\") " pod="openstack/openstack-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.821111 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Oct 03 13:08:29 crc kubenswrapper[4868]: I1003 13:08:29.900091 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.112556 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.114012 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.118865 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-5qcvm" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.119108 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.119230 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.134681 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.152289 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9r2m\" (UniqueName: \"kubernetes.io/projected/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kube-api-access-p9r2m\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.152434 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kolla-config\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.152569 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.152703 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-config-data\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.152840 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.254167 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kolla-config\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.254292 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.254332 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-config-data\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.254364 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.254430 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p9r2m\" (UniqueName: \"kubernetes.io/projected/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kube-api-access-p9r2m\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.255598 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-config-data\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.255669 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kolla-config\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.259798 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-memcached-tls-certs\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.262245 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e07cfc6a-c6f0-448f-a710-ead7d29c4619-combined-ca-bundle\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.276740 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9r2m\" (UniqueName: \"kubernetes.io/projected/e07cfc6a-c6f0-448f-a710-ead7d29c4619-kube-api-access-p9r2m\") pod \"memcached-0\" (UID: \"e07cfc6a-c6f0-448f-a710-ead7d29c4619\") " pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: I1003 13:08:30.441856 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Oct 03 13:08:30 crc kubenswrapper[4868]: W1003 13:08:30.563180 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7fb365e_bac6_4259_a7d7_3fb743ed87a5.slice/crio-1a9966ba245f03c43ae8ad3752aefe932a5d0861c89feca88c90dc94f7bce400 WatchSource:0}: Error finding container 1a9966ba245f03c43ae8ad3752aefe932a5d0861c89feca88c90dc94f7bce400: Status 404 returned error can't find the container with id 1a9966ba245f03c43ae8ad3752aefe932a5d0861c89feca88c90dc94f7bce400 Oct 03 13:08:31 crc kubenswrapper[4868]: I1003 13:08:31.192652 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" event={"ID":"b7fb365e-bac6-4259-a7d7-3fb743ed87a5","Type":"ContainerStarted","Data":"1a9966ba245f03c43ae8ad3752aefe932a5d0861c89feca88c90dc94f7bce400"} Oct 03 13:08:31 crc kubenswrapper[4868]: I1003 13:08:31.917875 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:08:31 crc kubenswrapper[4868]: I1003 13:08:31.919320 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:08:31 crc kubenswrapper[4868]: I1003 13:08:31.927649 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-68rmv" Oct 03 13:08:31 crc kubenswrapper[4868]: I1003 13:08:31.942500 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:08:32 crc kubenswrapper[4868]: I1003 13:08:32.087082 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkhhh\" (UniqueName: \"kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh\") pod \"kube-state-metrics-0\" (UID: \"5e8d2299-d5c9-413e-bb20-ace3833587fb\") " pod="openstack/kube-state-metrics-0" Oct 03 13:08:32 crc kubenswrapper[4868]: I1003 13:08:32.188576 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkhhh\" (UniqueName: \"kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh\") pod \"kube-state-metrics-0\" (UID: \"5e8d2299-d5c9-413e-bb20-ace3833587fb\") " pod="openstack/kube-state-metrics-0" Oct 03 13:08:32 crc kubenswrapper[4868]: I1003 13:08:32.219907 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkhhh\" (UniqueName: \"kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh\") pod \"kube-state-metrics-0\" (UID: \"5e8d2299-d5c9-413e-bb20-ace3833587fb\") " pod="openstack/kube-state-metrics-0" Oct 03 13:08:32 crc kubenswrapper[4868]: I1003 13:08:32.240750 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.094909 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-w4z7q"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.098920 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.101945 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.102020 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-7rg4w" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.103923 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.105410 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.112409 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-5lxjj"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.114590 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.136455 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5lxjj"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172134 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-etc-ovs\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172204 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172260 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-log\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172289 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms8cr\" (UniqueName: \"kubernetes.io/projected/75727504-0a62-4459-add3-419d244f05ff-kube-api-access-ms8cr\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172323 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-scripts\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172341 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-lib\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " 
pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172359 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75727504-0a62-4459-add3-419d244f05ff-scripts\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172378 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-run\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172399 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-log-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172423 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-combined-ca-bundle\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172444 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-ovn-controller-tls-certs\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172462 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crjqn\" (UniqueName: \"kubernetes.io/projected/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-kube-api-access-crjqn\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.172476 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.216815 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.219769 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.224285 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.224638 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.234993 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.247601 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.247837 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-tfrcb" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.261751 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279012 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-etc-ovs\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279078 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279131 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-log\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279163 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms8cr\" (UniqueName: \"kubernetes.io/projected/75727504-0a62-4459-add3-419d244f05ff-kube-api-access-ms8cr\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279208 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-scripts\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279238 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-lib\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279266 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75727504-0a62-4459-add3-419d244f05ff-scripts\") pod 
\"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279293 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-run\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279315 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-log-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279343 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-combined-ca-bundle\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279372 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-ovn-controller-tls-certs\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279398 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crjqn\" (UniqueName: \"kubernetes.io/projected/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-kube-api-access-crjqn\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.279419 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.280013 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.280164 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-etc-ovs\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.280253 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-run\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.280362 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-log\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.284651 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/75727504-0a62-4459-add3-419d244f05ff-var-log-ovn\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.284806 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-lib\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.287333 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-scripts\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.295392 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-var-run\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.298816 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/75727504-0a62-4459-add3-419d244f05ff-scripts\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.304374 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-combined-ca-bundle\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.322016 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crjqn\" (UniqueName: \"kubernetes.io/projected/c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24-kube-api-access-crjqn\") pod \"ovn-controller-ovs-5lxjj\" (UID: \"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24\") " pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.332018 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/75727504-0a62-4459-add3-419d244f05ff-ovn-controller-tls-certs\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.374194 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ms8cr\" (UniqueName: \"kubernetes.io/projected/75727504-0a62-4459-add3-419d244f05ff-kube-api-access-ms8cr\") pod \"ovn-controller-w4z7q\" (UID: \"75727504-0a62-4459-add3-419d244f05ff\") " 
pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.386401 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.387493 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.387691 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz654\" (UniqueName: \"kubernetes.io/projected/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-kube-api-access-jz654\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.387800 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.387875 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-config\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.387974 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.388032 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.388178 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.431793 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.448873 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.489997 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490197 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490303 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490341 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490430 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490468 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz654\" (UniqueName: \"kubernetes.io/projected/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-kube-api-access-jz654\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490497 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.490527 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-config\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.491675 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-config\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.492730 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod 
\"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.492947 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.494773 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.499233 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.499295 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.499962 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.526928 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz654\" (UniqueName: \"kubernetes.io/projected/27389bc5-0ed5-44b7-8061-fe3a9567ad3e-kube-api-access-jz654\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.537317 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"ovsdbserver-nb-0\" (UID: \"27389bc5-0ed5-44b7-8061-fe3a9567ad3e\") " pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:36 crc kubenswrapper[4868]: I1003 13:08:36.838864 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.792146 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.795432 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.798945 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.799043 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.799394 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.799443 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-gc89w" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.810028 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.943816 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.943909 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.943974 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.944004 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.944073 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.944504 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-config\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.944689 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfvw5\" (UniqueName: \"kubernetes.io/projected/1cc3db3f-e498-4f45-86bb-25781ae2f282-kube-api-access-pfvw5\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " 
pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:38 crc kubenswrapper[4868]: I1003 13:08:38.945223 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.047764 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.047941 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.047984 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.048080 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.048105 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.048148 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.048203 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-config\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.048255 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfvw5\" (UniqueName: \"kubernetes.io/projected/1cc3db3f-e498-4f45-86bb-25781ae2f282-kube-api-access-pfvw5\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.049187 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: 
\"kubernetes.io/empty-dir/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.049999 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.050200 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.051009 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cc3db3f-e498-4f45-86bb-25781ae2f282-config\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.056969 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.056991 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.057509 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1cc3db3f-e498-4f45-86bb-25781ae2f282-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.069956 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfvw5\" (UniqueName: \"kubernetes.io/projected/1cc3db3f-e498-4f45-86bb-25781ae2f282-kube-api-access-pfvw5\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.104558 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1cc3db3f-e498-4f45-86bb-25781ae2f282\") " pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:39 crc kubenswrapper[4868]: I1003 13:08:39.125480 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Oct 03 13:08:46 crc kubenswrapper[4868]: I1003 13:08:46.222024 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.736522 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.736989 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-c2xln,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-qpj9d_openstack(b7fb365e-bac6-4259-a7d7-3fb743ed87a5): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.738598 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.753732 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:08:46 crc 
kubenswrapper[4868]: E1003 13:08:46.754001 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gp922,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-d8xhw_openstack(c6c5e289-55a0-4970-b10f-1ac2fbd49039): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.755343 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" podUID="c6c5e289-55a0-4970-b10f-1ac2fbd49039" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.794077 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.794360 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wkpdc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-6d6hl_openstack(5b33b7b6-6204-48fa-9dea-cf7d3127cd79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.795725 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.819588 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.819772 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-d2g6n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-x99j6_openstack(d8085517-4ee6-470e-b3f6-4e993c096a8f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:08:46 crc kubenswrapper[4868]: E1003 13:08:46.820982 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" podUID="d8085517-4ee6-470e-b3f6-4e993c096a8f" Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.296816 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.320745 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.324601 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerStarted","Data":"9e2261bd2ea9f14b5ce473529009684ee542aa1485e35ab9fa3810848ac50fbf"} Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.326697 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7a386b95-6440-43fb-88c4-9e48c2277ca5","Type":"ContainerStarted","Data":"3a33e313a87aef939fba43c69b6a84856decbefe12002bde4cada0cacccd0d3f"} Oct 03 13:08:47 crc kubenswrapper[4868]: W1003 13:08:47.326925 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e1f1515_2197_4124_83dc_382a70cd3e91.slice/crio-1e12d04d3f21e5044157d4e03346e3d6239c0f08648a8b6e66f50ea03da433bd WatchSource:0}: Error finding container 
1e12d04d3f21e5044157d4e03346e3d6239c0f08648a8b6e66f50ea03da433bd: Status 404 returned error can't find the container with id 1e12d04d3f21e5044157d4e03346e3d6239c0f08648a8b6e66f50ea03da433bd
Oct 03 13:08:47 crc kubenswrapper[4868]: E1003 13:08:47.328441 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79"
Oct 03 13:08:47 crc kubenswrapper[4868]: E1003 13:08:47.329692 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5"
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.628284 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"]
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.644559 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q"]
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.661121 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"]
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.690649 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"]
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.802836 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6"
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.803250 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"]
Oct 03 13:08:47 crc kubenswrapper[4868]: W1003 13:08:47.805423 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27389bc5_0ed5_44b7_8061_fe3a9567ad3e.slice/crio-f58820ea1df6d75465bc436567cb47e37bcd8c6f67a4dc72204f1b988fd62ebe WatchSource:0}: Error finding container f58820ea1df6d75465bc436567cb47e37bcd8c6f67a4dc72204f1b988fd62ebe: Status 404 returned error can't find the container with id f58820ea1df6d75465bc436567cb47e37bcd8c6f67a4dc72204f1b988fd62ebe
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.808449 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw"
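After the ErrImagePull failures above, the very next sync attempts for the same pods fail fast with ImagePullBackOff: kubelet refuses to re-pull until a backoff window has elapsed. A sketch of that schedule, assuming the usual kubelet defaults of a 10s initial backoff doubling per failure up to a 300s cap (the exact constants are an assumption here, not something this log states):

    // Illustrative only: assumed 10s initial backoff, doubling per
    // failed pull, capped at 300s.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        backoff, maxBackoff := 10*time.Second, 300*time.Second
        for failure := 1; failure <= 7; failure++ {
            fmt.Printf("failure %d: next pull allowed after %v\n", failure, backoff)
            backoff *= 2
            if backoff > maxBackoff {
                backoff = maxBackoff
            }
        }
    }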
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.907223 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gp922\" (UniqueName: \"kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922\") pod \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") "
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.907338 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config\") pod \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\" (UID: \"c6c5e289-55a0-4970-b10f-1ac2fbd49039\") "
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.907385 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d2g6n\" (UniqueName: \"kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n\") pod \"d8085517-4ee6-470e-b3f6-4e993c096a8f\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") "
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.907418 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc\") pod \"d8085517-4ee6-470e-b3f6-4e993c096a8f\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") "
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.907485 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config\") pod \"d8085517-4ee6-470e-b3f6-4e993c096a8f\" (UID: \"d8085517-4ee6-470e-b3f6-4e993c096a8f\") "
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.908137 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config" (OuterVolumeSpecName: "config") pod "c6c5e289-55a0-4970-b10f-1ac2fbd49039" (UID: "c6c5e289-55a0-4970-b10f-1ac2fbd49039"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.908222 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "d8085517-4ee6-470e-b3f6-4e993c096a8f" (UID: "d8085517-4ee6-470e-b3f6-4e993c096a8f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.908299 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config" (OuterVolumeSpecName: "config") pod "d8085517-4ee6-470e-b3f6-4e993c096a8f" (UID: "d8085517-4ee6-470e-b3f6-4e993c096a8f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.915714 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922" (OuterVolumeSpecName: "kube-api-access-gp922") pod "c6c5e289-55a0-4970-b10f-1ac2fbd49039" (UID: "c6c5e289-55a0-4970-b10f-1ac2fbd49039"). InnerVolumeSpecName "kube-api-access-gp922".
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:08:47 crc kubenswrapper[4868]: I1003 13:08:47.920584 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n" (OuterVolumeSpecName: "kube-api-access-d2g6n") pod "d8085517-4ee6-470e-b3f6-4e993c096a8f" (UID: "d8085517-4ee6-470e-b3f6-4e993c096a8f"). InnerVolumeSpecName "kube-api-access-d2g6n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.009614 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gp922\" (UniqueName: \"kubernetes.io/projected/c6c5e289-55a0-4970-b10f-1ac2fbd49039-kube-api-access-gp922\") on node \"crc\" DevicePath \"\"" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.009655 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c6c5e289-55a0-4970-b10f-1ac2fbd49039-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.009667 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d2g6n\" (UniqueName: \"kubernetes.io/projected/d8085517-4ee6-470e-b3f6-4e993c096a8f-kube-api-access-d2g6n\") on node \"crc\" DevicePath \"\"" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.009682 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.009693 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8085517-4ee6-470e-b3f6-4e993c096a8f-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.344667 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b28a570d-3c2f-43c5-8be6-908d8ecabb08","Type":"ContainerStarted","Data":"7d386d553547631bf4d2945b5bc92a3294f596de35e9d74413dd82c061a6a50a"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.347343 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e07cfc6a-c6f0-448f-a710-ead7d29c4619","Type":"ContainerStarted","Data":"1994ecad1c6fdfd602c0487e48c745db5ca9820ffcd6e29b91ed4c70e8e56d1b"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.349228 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e8d2299-d5c9-413e-bb20-ace3833587fb","Type":"ContainerStarted","Data":"9626a20f387247ef9c62603e51b04ad6da3232279efa86539b1cef1fdec52553"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.351108 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27389bc5-0ed5-44b7-8061-fe3a9567ad3e","Type":"ContainerStarted","Data":"f58820ea1df6d75465bc436567cb47e37bcd8c6f67a4dc72204f1b988fd62ebe"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.354474 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerStarted","Data":"1e12d04d3f21e5044157d4e03346e3d6239c0f08648a8b6e66f50ea03da433bd"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.356635 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q" 
event={"ID":"75727504-0a62-4459-add3-419d244f05ff","Type":"ContainerStarted","Data":"cf6b691d9a01cc4f00e87de1d3ee04f6bd86dc8a15b2bfaffe9e5685ea5346b4"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.360580 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" event={"ID":"d8085517-4ee6-470e-b3f6-4e993c096a8f","Type":"ContainerDied","Data":"74bdd44a531bad0b083a34a986f0f4bbe210852e530dfb438dd113fadff2a912"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.360639 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-x99j6" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.365159 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" event={"ID":"c6c5e289-55a0-4970-b10f-1ac2fbd49039","Type":"ContainerDied","Data":"f9c26d7d55fd7ea35ae4209b74e2543888e43f74824c2cd0230f1fab5eed476b"} Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.365193 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-d8xhw" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.434712 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.441524 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-d8xhw"] Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.459583 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.500421 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-x99j6"] Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.586035 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6c5e289-55a0-4970-b10f-1ac2fbd49039" path="/var/lib/kubelet/pods/c6c5e289-55a0-4970-b10f-1ac2fbd49039/volumes" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.586419 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8085517-4ee6-470e-b3f6-4e993c096a8f" path="/var/lib/kubelet/pods/d8085517-4ee6-470e-b3f6-4e993c096a8f/volumes" Oct 03 13:08:48 crc kubenswrapper[4868]: I1003 13:08:48.829291 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Oct 03 13:08:49 crc kubenswrapper[4868]: I1003 13:08:49.807107 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-5lxjj"] Oct 03 13:08:50 crc kubenswrapper[4868]: W1003 13:08:50.286592 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1cc3db3f_e498_4f45_86bb_25781ae2f282.slice/crio-f4eef527f1251180c79cf63a63244913866099521755ed311249e01d5bbe5905 WatchSource:0}: Error finding container f4eef527f1251180c79cf63a63244913866099521755ed311249e01d5bbe5905: Status 404 returned error can't find the container with id f4eef527f1251180c79cf63a63244913866099521755ed311249e01d5bbe5905 Oct 03 13:08:50 crc kubenswrapper[4868]: W1003 13:08:50.294904 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7e7a9d0_47c6_47b7_b0d1_1fd9b3fb9e24.slice/crio-8809ef1179c3df6d4ea2df2c2f55946935b188d2d2947473533afbe827107332 WatchSource:0}: Error finding container 
8809ef1179c3df6d4ea2df2c2f55946935b188d2d2947473533afbe827107332: Status 404 returned error can't find the container with id 8809ef1179c3df6d4ea2df2c2f55946935b188d2d2947473533afbe827107332 Oct 03 13:08:50 crc kubenswrapper[4868]: I1003 13:08:50.387699 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1cc3db3f-e498-4f45-86bb-25781ae2f282","Type":"ContainerStarted","Data":"f4eef527f1251180c79cf63a63244913866099521755ed311249e01d5bbe5905"} Oct 03 13:08:50 crc kubenswrapper[4868]: I1003 13:08:50.389378 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5lxjj" event={"ID":"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24","Type":"ContainerStarted","Data":"8809ef1179c3df6d4ea2df2c2f55946935b188d2d2947473533afbe827107332"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.431840 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q" event={"ID":"75727504-0a62-4459-add3-419d244f05ff","Type":"ContainerStarted","Data":"683dd4cbbfd11591892c84318e5bfe463ca253ae2b1f3bd249666e58bf615879"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.432446 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-w4z7q" Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.433529 4868 generic.go:334] "Generic (PLEG): container finished" podID="c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24" containerID="0e2a98de7570ba2bc67a5601daf4e339f0083e600eca8b7b8b05eabf450ab313" exitCode=0 Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.433603 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5lxjj" event={"ID":"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24","Type":"ContainerDied","Data":"0e2a98de7570ba2bc67a5601daf4e339f0083e600eca8b7b8b05eabf450ab313"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.448949 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7a386b95-6440-43fb-88c4-9e48c2277ca5","Type":"ContainerStarted","Data":"8cbc0c2550be20cd951a5700d54e7b65c9b311315e91a42676203876366d438a"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.451377 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b28a570d-3c2f-43c5-8be6-908d8ecabb08","Type":"ContainerStarted","Data":"1882ba17978deeb60ed7dde8282dad7fb36ff86b6742a5c8071dfa24bd5397d9"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.456942 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-w4z7q" podStartSLOduration=13.269277373 podStartE2EDuration="20.456927901s" podCreationTimestamp="2025-10-03 13:08:36 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.660132577 +0000 UTC m=+1123.869981643" lastFinishedPulling="2025-10-03 13:08:54.847783095 +0000 UTC m=+1131.057632171" observedRunningTime="2025-10-03 13:08:56.455323288 +0000 UTC m=+1132.665172364" watchObservedRunningTime="2025-10-03 13:08:56.456927901 +0000 UTC m=+1132.666776967" Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.459065 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"e07cfc6a-c6f0-448f-a710-ead7d29c4619","Type":"ContainerStarted","Data":"13a0c9b4f8de9afdb07c5125272cb7f8d89c3001aac71b1fbe28b210a6011365"} Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.459204 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Oct 03 13:08:56 
crc kubenswrapper[4868]: I1003 13:08:56.461626 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e8d2299-d5c9-413e-bb20-ace3833587fb","Type":"ContainerStarted","Data":"fe69a2935c5d64392a9ab31b1c0d0cc2fa16ff4c993299e61227ddf60b7d3eef"}
Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.461739 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.463414 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1cc3db3f-e498-4f45-86bb-25781ae2f282","Type":"ContainerStarted","Data":"5906139b673460a4743676766002515a697d22f437c3de7712e44b704c7bd29a"}
Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.466317 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27389bc5-0ed5-44b7-8061-fe3a9567ad3e","Type":"ContainerStarted","Data":"13e837ef6557337b6f04ea148704c2022e545c6ae893f4575d6d4509eb13b549"}
Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.565858 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=17.840291651 podStartE2EDuration="25.565839263s" podCreationTimestamp="2025-10-03 13:08:31 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.706353632 +0000 UTC m=+1123.916202688" lastFinishedPulling="2025-10-03 13:08:55.431901224 +0000 UTC m=+1131.641750300" observedRunningTime="2025-10-03 13:08:56.54255637 +0000 UTC m=+1132.752405436" watchObservedRunningTime="2025-10-03 13:08:56.565839263 +0000 UTC m=+1132.775688329"
Oct 03 13:08:56 crc kubenswrapper[4868]: I1003 13:08:56.572095 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=19.640638862 podStartE2EDuration="26.57207987s" podCreationTimestamp="2025-10-03 13:08:30 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.654027154 +0000 UTC m=+1123.863876220" lastFinishedPulling="2025-10-03 13:08:54.585468162 +0000 UTC m=+1130.795317228" observedRunningTime="2025-10-03 13:08:56.559595767 +0000 UTC m=+1132.769444843" watchObservedRunningTime="2025-10-03 13:08:56.57207987 +0000 UTC m=+1132.781928936"
Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.478954 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5lxjj" event={"ID":"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24","Type":"ContainerStarted","Data":"e24b74fe60f551802214bca30fb7a5d940e6e241efd834f5a65be375bf9f73f3"}
Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.479439 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5lxjj"
Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.479451 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-5lxjj"
Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.479460 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-5lxjj" event={"ID":"c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24","Type":"ContainerStarted","Data":"a2e83e9db7b0b6864b36c80c4689996f8b1a48099babb5b9ff0e4cbe06a74137"}
Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.482040 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerStarted","Data":"50e1b41735c10e406a5cb897fadf8e51579bbb8ae1b7b47347c387d79cde3bf1"}
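The "Observed pod startup duration" entries are internally consistent: podStartSLOduration is podStartE2EDuration minus the image pull window (lastFinishedPulling - firstStartedPulling), i.e. the SLO metric excludes time spent pulling images. Re-deriving the memcached-0 numbers above in Go (timestamps and durations copied from the log; the subtraction rule is inferred from these entries rather than quoted from kubelet source):

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Values copied from the memcached-0 entry above.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        first, _ := time.Parse(layout, "2025-10-03 13:08:47.654027154 +0000 UTC")
        last, _ := time.Parse(layout, "2025-10-03 13:08:54.585468162 +0000 UTC")
        e2e, _ := time.ParseDuration("26.57207987s") // podStartE2EDuration

        pull := last.Sub(first)               // image pull window: 6.931441008s
        fmt.Println("pull window:", pull)
        fmt.Println("e2e - pull: ", e2e-pull) // 19.640638862s = podStartSLOduration
    }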
event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerStarted","Data":"50e1b41735c10e406a5cb897fadf8e51579bbb8ae1b7b47347c387d79cde3bf1"} Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.483791 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerStarted","Data":"625f04b3a97858bf53e28d9d473c9007bd6767b393dbbec4bff35073cc8d592f"} Oct 03 13:08:57 crc kubenswrapper[4868]: I1003 13:08:57.505228 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-5lxjj" podStartSLOduration=17.218657094 podStartE2EDuration="21.505206961s" podCreationTimestamp="2025-10-03 13:08:36 +0000 UTC" firstStartedPulling="2025-10-03 13:08:50.298820722 +0000 UTC m=+1126.508669788" lastFinishedPulling="2025-10-03 13:08:54.585370599 +0000 UTC m=+1130.795219655" observedRunningTime="2025-10-03 13:08:57.498527962 +0000 UTC m=+1133.708377028" watchObservedRunningTime="2025-10-03 13:08:57.505206961 +0000 UTC m=+1133.715056037" Oct 03 13:08:59 crc kubenswrapper[4868]: I1003 13:08:59.511043 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1cc3db3f-e498-4f45-86bb-25781ae2f282","Type":"ContainerStarted","Data":"12b35a0b41ccc2251902bad126c5c49294102fd279f5096c61325b811c4fb07f"} Oct 03 13:08:59 crc kubenswrapper[4868]: I1003 13:08:59.513844 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"27389bc5-0ed5-44b7-8061-fe3a9567ad3e","Type":"ContainerStarted","Data":"6e68abd4f9d914dd5290973a54b6e6a5ff2924fbe25240ab97eafdcd99bc7808"} Oct 03 13:08:59 crc kubenswrapper[4868]: I1003 13:08:59.542000 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=13.775605861 podStartE2EDuration="22.541978801s" podCreationTimestamp="2025-10-03 13:08:37 +0000 UTC" firstStartedPulling="2025-10-03 13:08:50.290358146 +0000 UTC m=+1126.500207212" lastFinishedPulling="2025-10-03 13:08:59.056731086 +0000 UTC m=+1135.266580152" observedRunningTime="2025-10-03 13:08:59.535848766 +0000 UTC m=+1135.745697862" watchObservedRunningTime="2025-10-03 13:08:59.541978801 +0000 UTC m=+1135.751827857" Oct 03 13:08:59 crc kubenswrapper[4868]: I1003 13:08:59.561587 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=13.33152861 podStartE2EDuration="24.561569805s" podCreationTimestamp="2025-10-03 13:08:35 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.807732304 +0000 UTC m=+1124.017581370" lastFinishedPulling="2025-10-03 13:08:59.037773499 +0000 UTC m=+1135.247622565" observedRunningTime="2025-10-03 13:08:59.55916096 +0000 UTC m=+1135.769010186" watchObservedRunningTime="2025-10-03 13:08:59.561569805 +0000 UTC m=+1135.771418871" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.126040 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.163686 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.445667 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.524528 4868 generic.go:334] "Generic (PLEG): container finished" 
podID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerID="0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645" exitCode=0 Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.525068 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" event={"ID":"b7fb365e-bac6-4259-a7d7-3fb743ed87a5","Type":"ContainerDied","Data":"0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645"} Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.526158 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.587937 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.843306 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.886905 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"] Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.932016 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"] Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.933700 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.934900 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.936469 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb" Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.943856 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-x4xch"] Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.945385 4868 util.go:30] "No sandbox for pod can be found. 
Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.947671 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.954600 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x4xch"]
Oct 03 13:09:00 crc kubenswrapper[4868]: I1003 13:09:00.961251 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"]
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032554 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovn-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032603 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-combined-ca-bundle\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032636 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-92pxw\" (UniqueName: \"kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032668 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032690 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovs-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032722 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032799 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032872 4868 reconciler_common.go:245]
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.032987 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939a9e63-6947-478c-8a13-75d46852cf89-config\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.033031 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2rt8\" (UniqueName: \"kubernetes.io/projected/939a9e63-6947-478c-8a13-75d46852cf89-kube-api-access-g2rt8\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.135581 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovs-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.135667 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.135699 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.136069 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovs-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.136868 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.138642 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.138885 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/939a9e63-6947-478c-8a13-75d46852cf89-config\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139020 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2rt8\" (UniqueName: \"kubernetes.io/projected/939a9e63-6947-478c-8a13-75d46852cf89-kube-api-access-g2rt8\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139142 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovn-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139173 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-combined-ca-bundle\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139207 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-92pxw\" (UniqueName: \"kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139266 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139483 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/939a9e63-6947-478c-8a13-75d46852cf89-ovn-rundir\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139589 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.139710 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/939a9e63-6947-478c-8a13-75d46852cf89-config\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.140177 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config\") pod 
\"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.144644 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.144886 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/939a9e63-6947-478c-8a13-75d46852cf89-combined-ca-bundle\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.156461 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-92pxw\" (UniqueName: \"kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw\") pod \"dnsmasq-dns-7f896c8c65-vm65s\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") " pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.157350 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2rt8\" (UniqueName: \"kubernetes.io/projected/939a9e63-6947-478c-8a13-75d46852cf89-kube-api-access-g2rt8\") pod \"ovn-controller-metrics-x4xch\" (UID: \"939a9e63-6947-478c-8a13-75d46852cf89\") " pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.311617 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"] Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.344964 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"] Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.346591 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.350723 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.360713 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"] Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.401333 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-x4xch" Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.401727 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.452213 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.452355 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.452424 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.452444 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.452476 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pmrst\" (UniqueName: \"kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.547433 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" event={"ID":"b7fb365e-bac6-4259-a7d7-3fb743ed87a5","Type":"ContainerStarted","Data":"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"}
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.547646 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="dnsmasq-dns" containerID="cri-o://2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3" gracePeriod=10
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.547945 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.557811 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.557848 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.557879 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pmrst\" (UniqueName: \"kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.557902 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.557963 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.558675 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.558805 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.559075 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.559290 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.560344 4868 generic.go:334] "Generic (PLEG): container finished" podID="7a386b95-6440-43fb-88c4-9e48c2277ca5" containerID="8cbc0c2550be20cd951a5700d54e7b65c9b311315e91a42676203876366d438a" exitCode=0
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.560419 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7a386b95-6440-43fb-88c4-9e48c2277ca5","Type":"ContainerDied","Data":"8cbc0c2550be20cd951a5700d54e7b65c9b311315e91a42676203876366d438a"}
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.572397 4868 generic.go:334] "Generic (PLEG): container finished" podID="b28a570d-3c2f-43c5-8be6-908d8ecabb08" containerID="1882ba17978deeb60ed7dde8282dad7fb36ff86b6742a5c8071dfa24bd5397d9" exitCode=0
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.572479 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b28a570d-3c2f-43c5-8be6-908d8ecabb08","Type":"ContainerDied","Data":"1882ba17978deeb60ed7dde8282dad7fb36ff86b6742a5c8071dfa24bd5397d9"}
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.578734 4868 generic.go:334] "Generic (PLEG): container finished" podID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" containerID="2cd038986dccea2621e15e3da75650ea5b90b35c3c2ebd1f9e5e2c3d6c927f6b" exitCode=0
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.579067 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" event={"ID":"5b33b7b6-6204-48fa-9dea-cf7d3127cd79","Type":"ContainerDied","Data":"2cd038986dccea2621e15e3da75650ea5b90b35c3c2ebd1f9e5e2c3d6c927f6b"}
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.579509 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.584831 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" podStartSLOduration=7.560608753 podStartE2EDuration="36.584806403s" podCreationTimestamp="2025-10-03 13:08:25 +0000 UTC" firstStartedPulling="2025-10-03 13:08:30.565560958 +0000 UTC m=+1106.775410024" lastFinishedPulling="2025-10-03 13:08:59.589758608 +0000 UTC m=+1135.799607674" observedRunningTime="2025-10-03 13:09:01.579497641 +0000 UTC m=+1137.789346727" watchObservedRunningTime="2025-10-03 13:09:01.584806403 +0000 UTC m=+1137.794655469"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.599214 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pmrst\" (UniqueName: \"kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst\") pod \"dnsmasq-dns-86db49b7ff-9hdst\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.675770 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.693428 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.889932 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.891999 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.896252 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-lldrk"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.896896 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.896984 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.897113 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Oct 03 13:09:01 crc kubenswrapper[4868]: I1003 13:09:01.906030 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082000 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-scripts\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082109 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082160 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-config\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082214 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082251 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxksn\" (UniqueName: \"kubernetes.io/projected/26ca4d65-6de7-490b-9492-c9fd70fe37b5-kube-api-access-qxksn\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082295 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.082355 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.097283 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.153655 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.153710 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185702 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxksn\" (UniqueName: \"kubernetes.io/projected/26ca4d65-6de7-490b-9492-c9fd70fe37b5-kube-api-access-qxksn\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185790 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185867 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185892 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-scripts\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185914 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.185958 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-config\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.186013 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.187039 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.188078 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-scripts\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.188338 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/26ca4d65-6de7-490b-9492-c9fd70fe37b5-config\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.195745 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.199742 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.202725 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/26ca4d65-6de7-490b-9492-c9fd70fe37b5-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.237124 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxksn\" (UniqueName: \"kubernetes.io/projected/26ca4d65-6de7-490b-9492-c9fd70fe37b5-kube-api-access-qxksn\") pod \"ovn-northd-0\" (UID: \"26ca4d65-6de7-490b-9492-c9fd70fe37b5\") " pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.256217 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.285108 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.310034 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-x4xch"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.405123 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.449724 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.449768 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.493523 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.498465 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"]
Oct 03 13:09:02 crc kubenswrapper[4868]: E1003 13:09:02.498875 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" containerName="init"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.498888 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" containerName="init"
Oct 03 13:09:02 crc kubenswrapper[4868]: E1003 13:09:02.498900 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="dnsmasq-dns"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.498906 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="dnsmasq-dns"
Oct 03 13:09:02 crc kubenswrapper[4868]: E1003 13:09:02.498926 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="init"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.498931 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="init"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.499153 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" containerName="init"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.499168 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerName="dnsmasq-dns"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.521267 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.523688 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594439 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc\") pod \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594494 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc\") pod \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594586 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config\") pod \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594641 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2xln\" (UniqueName: \"kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln\") pod \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594807 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config\") pod \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\" (UID: \"b7fb365e-bac6-4259-a7d7-3fb743ed87a5\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.594884 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkpdc\" (UniqueName: \"kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc\") pod \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\" (UID: \"5b33b7b6-6204-48fa-9dea-cf7d3127cd79\") "
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.608849 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc" (OuterVolumeSpecName: "kube-api-access-wkpdc") pod "5b33b7b6-6204-48fa-9dea-cf7d3127cd79" (UID: "5b33b7b6-6204-48fa-9dea-cf7d3127cd79"). InnerVolumeSpecName "kube-api-access-wkpdc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.627215 4868 generic.go:334] "Generic (PLEG): container finished" podID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" containerID="2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3" exitCode=0
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.627328 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.628072 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln" (OuterVolumeSpecName: "kube-api-access-c2xln") pod "b7fb365e-bac6-4259-a7d7-3fb743ed87a5" (UID: "b7fb365e-bac6-4259-a7d7-3fb743ed87a5"). InnerVolumeSpecName "kube-api-access-c2xln". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.643005 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config" (OuterVolumeSpecName: "config") pod "5b33b7b6-6204-48fa-9dea-cf7d3127cd79" (UID: "5b33b7b6-6204-48fa-9dea-cf7d3127cd79"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.660030 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" event={"ID":"c592801a-34fa-484e-bca5-31f2a53a64f7","Type":"ContainerStarted","Data":"48d9c1e8a5b950c2bb51017fe3ed9a21c671f26e81d7e426786ac7ffe4c04543"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.660105 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4xch" event={"ID":"939a9e63-6947-478c-8a13-75d46852cf89","Type":"ContainerStarted","Data":"eb310fa754f72f9d8d581ca01e43686832536b52af47838ef424c223e6d5acd8"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.660120 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" event={"ID":"b7fb365e-bac6-4259-a7d7-3fb743ed87a5","Type":"ContainerDied","Data":"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.661733 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b7fb365e-bac6-4259-a7d7-3fb743ed87a5" (UID: "b7fb365e-bac6-4259-a7d7-3fb743ed87a5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662246 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qpj9d" event={"ID":"b7fb365e-bac6-4259-a7d7-3fb743ed87a5","Type":"ContainerDied","Data":"1a9966ba245f03c43ae8ad3752aefe932a5d0861c89feca88c90dc94f7bce400"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662300 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" event={"ID":"316a0ee4-5745-4f86-9d0d-4730f8a5e78a","Type":"ContainerStarted","Data":"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662324 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" event={"ID":"316a0ee4-5745-4f86-9d0d-4730f8a5e78a","Type":"ContainerStarted","Data":"1a319beced80b669b467b4c319d83df62119fe2289f0d1cd310a3322b8719e3c"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662342 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"7a386b95-6440-43fb-88c4-9e48c2277ca5","Type":"ContainerStarted","Data":"5f2f48fa35131373994d088d2b7bdef602258fbfcd2a10bd4cf5de61e26deb1f"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662358 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"b28a570d-3c2f-43c5-8be6-908d8ecabb08","Type":"ContainerStarted","Data":"0ef91817b7d78fd737d1c2ae805ace0b9aae0da558517b6a17186b376dee9107"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.662788 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.663955 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5b33b7b6-6204-48fa-9dea-cf7d3127cd79" (UID: "5b33b7b6-6204-48fa-9dea-cf7d3127cd79"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.664207 4868 scope.go:117] "RemoveContainer" containerID="2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.663960 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-6d6hl" event={"ID":"5b33b7b6-6204-48fa-9dea-cf7d3127cd79","Type":"ContainerDied","Data":"061ca7ef95b6e028055c84b7d0f0f90070f6ec2388175f2b70ff24b64a2dff55"}
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.709385 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.709894 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=27.807004201 podStartE2EDuration="34.709859804s" podCreationTimestamp="2025-10-03 13:08:28 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.682576368 +0000 UTC m=+1123.892425434" lastFinishedPulling="2025-10-03 13:08:54.585431971 +0000 UTC m=+1130.795281037" observedRunningTime="2025-10-03 13:09:02.706850934 +0000 UTC m=+1138.916700000" watchObservedRunningTime="2025-10-03 13:09:02.709859804 +0000 UTC m=+1138.919708880"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711203 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711334 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cvhvf\" (UniqueName: \"kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711401 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711425 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711631 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkpdc\" (UniqueName: \"kubernetes.io/projected/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-kube-api-access-wkpdc\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711651 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711662 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711671 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b33b7b6-6204-48fa-9dea-cf7d3127cd79-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.711681 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2xln\" (UniqueName: \"kubernetes.io/projected/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-kube-api-access-c2xln\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.720671 4868 scope.go:117] "RemoveContainer" containerID="0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.723167 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config" (OuterVolumeSpecName: "config") pod "b7fb365e-bac6-4259-a7d7-3fb743ed87a5" (UID: "b7fb365e-bac6-4259-a7d7-3fb743ed87a5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.751171 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=26.74141642 podStartE2EDuration="34.751147849s" podCreationTimestamp="2025-10-03 13:08:28 +0000 UTC" firstStartedPulling="2025-10-03 13:08:46.769988476 +0000 UTC m=+1122.979837542" lastFinishedPulling="2025-10-03 13:08:54.779719905 +0000 UTC m=+1130.989568971" observedRunningTime="2025-10-03 13:09:02.740477053 +0000 UTC m=+1138.950326149" watchObservedRunningTime="2025-10-03 13:09:02.751147849 +0000 UTC m=+1138.960996915"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.756599 4868 scope.go:117] "RemoveContainer" containerID="2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"
Oct 03 13:09:02 crc kubenswrapper[4868]: E1003 13:09:02.757171 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3\": container with ID starting with 2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3 not found: ID does not exist" containerID="2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.757379 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3"} err="failed to get container status \"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3\": rpc error: code = NotFound desc = could not find container \"2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3\": container with ID starting with 2fd90097a849fa16f7a72ad8b018c0fc209002c60f271f0fed6518e8d05183b3 not found: ID does not exist"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.757415 4868 scope.go:117] "RemoveContainer" containerID="0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645"
Oct 03 13:09:02 crc kubenswrapper[4868]: E1003 13:09:02.757782 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645\": container with ID starting with 0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645 not found: ID does not exist" containerID="0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.757835 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645"} err="failed to get container status \"0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645\": rpc error: code = NotFound desc = could not find container \"0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645\": container with ID starting with 0b88a9af569cd94082e780f1a78707258c3b9f5a28b65e8f6f1e97d0f108b645 not found: ID does not exist"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.757854 4868 scope.go:117] "RemoveContainer" containerID="2cd038986dccea2621e15e3da75650ea5b90b35c3c2ebd1f9e5e2c3d6c927f6b"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.812645 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cvhvf\" (UniqueName: \"kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.812732 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.812749 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.812948 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.812967 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.814353 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b7fb365e-bac6-4259-a7d7-3fb743ed87a5-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.817136 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.817662 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.817656 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.818418 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.842312 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cvhvf\" (UniqueName: \"kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf\") pod \"dnsmasq-dns-698758b865-7rdpg\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.876154 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.951974 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.965865 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.985894 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"]
Oct 03 13:09:02 crc kubenswrapper[4868]: I1003 13:09:02.990152 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qpj9d"]
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.017924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb\") pod \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") "
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.018071 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc\") pod \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") "
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.018190 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config\") pod \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") "
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.018228 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-92pxw\" (UniqueName: \"kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw\") pod \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\" (UID: \"316a0ee4-5745-4f86-9d0d-4730f8a5e78a\") "
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.050690 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw" (OuterVolumeSpecName: "kube-api-access-92pxw") pod "316a0ee4-5745-4f86-9d0d-4730f8a5e78a" (UID: "316a0ee4-5745-4f86-9d0d-4730f8a5e78a"). InnerVolumeSpecName "kube-api-access-92pxw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.051235 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "316a0ee4-5745-4f86-9d0d-4730f8a5e78a" (UID: "316a0ee4-5745-4f86-9d0d-4730f8a5e78a"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.051744 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "316a0ee4-5745-4f86-9d0d-4730f8a5e78a" (UID: "316a0ee4-5745-4f86-9d0d-4730f8a5e78a"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.051809 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config" (OuterVolumeSpecName: "config") pod "316a0ee4-5745-4f86-9d0d-4730f8a5e78a" (UID: "316a0ee4-5745-4f86-9d0d-4730f8a5e78a"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.077737 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"]
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.085946 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-6d6hl"]
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.120071 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.120119 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-92pxw\" (UniqueName: \"kubernetes.io/projected/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-kube-api-access-92pxw\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.120132 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.120145 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/316a0ee4-5745-4f86-9d0d-4730f8a5e78a-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.380577 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"]
Oct 03 13:09:03 crc kubenswrapper[4868]: W1003 13:09:03.389577 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc47f90b9_aeac_456b_b26f_0cde9556f32f.slice/crio-bd4a26a43160326e6224322ef4f68c81f7b9224d43eeea946a559d5648ace8e9 WatchSource:0}: Error finding container bd4a26a43160326e6224322ef4f68c81f7b9224d43eeea946a559d5648ace8e9: Status 404 returned error can't find the container with id bd4a26a43160326e6224322ef4f68c81f7b9224d43eeea946a559d5648ace8e9
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.496295 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"]
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.497264 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="316a0ee4-5745-4f86-9d0d-4730f8a5e78a" containerName="init"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.497283 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="316a0ee4-5745-4f86-9d0d-4730f8a5e78a" containerName="init"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.497482 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="316a0ee4-5745-4f86-9d0d-4730f8a5e78a" containerName="init"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.503006 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.505292 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.505873 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-pwrfd"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.505943 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.506118 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.529515 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.633766 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.633855 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.633923 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-cache\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.633983 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-lock\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.634928 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-925zp\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-kube-api-access-925zp\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.665523 4868 generic.go:334] "Generic (PLEG): container finished" podID="316a0ee4-5745-4f86-9d0d-4730f8a5e78a" containerID="b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5" exitCode=0
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.665589 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" event={"ID":"316a0ee4-5745-4f86-9d0d-4730f8a5e78a","Type":"ContainerDied","Data":"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.665616 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s" event={"ID":"316a0ee4-5745-4f86-9d0d-4730f8a5e78a","Type":"ContainerDied","Data":"1a319beced80b669b467b4c319d83df62119fe2289f0d1cd310a3322b8719e3c"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.665633 4868 scope.go:117] "RemoveContainer" containerID="b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.665886 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7f896c8c65-vm65s"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.667599 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"26ca4d65-6de7-490b-9492-c9fd70fe37b5","Type":"ContainerStarted","Data":"7e67d72d888cfd214d127cc3d827d1230a26a899d5aea7bab277a24e7223c301"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.670653 4868 generic.go:334] "Generic (PLEG): container finished" podID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerID="489da5ee8c8f6825e37f99e7035b212118d302e98f2c2a7d6d7f10a2c67c59e0" exitCode=0
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.670852 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" event={"ID":"c592801a-34fa-484e-bca5-31f2a53a64f7","Type":"ContainerDied","Data":"489da5ee8c8f6825e37f99e7035b212118d302e98f2c2a7d6d7f10a2c67c59e0"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.672722 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-x4xch" event={"ID":"939a9e63-6947-478c-8a13-75d46852cf89","Type":"ContainerStarted","Data":"ba87266cb6155806930d10f3f5d6ae37c85084f33fc24f14f61dca4ba590d6c2"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.682327 4868 generic.go:334] "Generic (PLEG): container finished" podID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerID="cc5780bd24d45fdd5665210c06d23ada6a083cd8ae063f74bb50f3248179b724" exitCode=0
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.682429 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7rdpg" event={"ID":"c47f90b9-aeac-456b-b26f-0cde9556f32f","Type":"ContainerDied","Data":"cc5780bd24d45fdd5665210c06d23ada6a083cd8ae063f74bb50f3248179b724"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.682458 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7rdpg" event={"ID":"c47f90b9-aeac-456b-b26f-0cde9556f32f","Type":"ContainerStarted","Data":"bd4a26a43160326e6224322ef4f68c81f7b9224d43eeea946a559d5648ace8e9"}
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.711591 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-x4xch" podStartSLOduration=3.711569269 podStartE2EDuration="3.711569269s" podCreationTimestamp="2025-10-03 13:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:03.707676394 +0000 UTC m=+1139.917525460" watchObservedRunningTime="2025-10-03 13:09:03.711569269 +0000 UTC m=+1139.921418335"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.741124 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-925zp\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-kube-api-access-925zp\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.741280 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.744298 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.744933 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-cache\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.745022 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-lock\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.748707 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.748952 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.749005 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.749093 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:04.249069891 +0000 UTC m=+1140.458918997 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.749747 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-cache\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.751271 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/bb0842a9-9947-4561-af16-154496b90622-lock\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.771578 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-925zp\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-kube-api-access-925zp\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.806524 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.812322 4868 scope.go:117] "RemoveContainer" containerID="b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.817954 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5\": container with ID starting with b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5 not found: ID does not exist" containerID="b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.818010 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5"} err="failed to get container status \"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5\": rpc error: code = NotFound desc = could not find container \"b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5\": container with ID starting with b059b8ab4881754b2207ab1ea955064e4c15cef49329e44f862b8d2c1ba216e5 not found: ID does not exist"
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.833223 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"]
Oct 03 13:09:03 crc kubenswrapper[4868]: I1003 13:09:03.838630 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7f896c8c65-vm65s"]
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.906193 4868 log.go:32] "CreateContainer in sandbox from runtime service failed" err=<
Oct 03 13:09:03 crc kubenswrapper[4868]: 	rpc error: code = Unknown desc = container create failed: mount `/var/lib/kubelet/pods/c592801a-34fa-484e-bca5-31f2a53a64f7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 03 13:09:03 crc kubenswrapper[4868]: > podSandboxID="48d9c1e8a5b950c2bb51017fe3ed9a21c671f26e81d7e426786ac7ffe4c04543"
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.906849 4868 kuberuntime_manager.go:1274] "Unhandled Error" err=<
Oct 03 13:09:03 crc kubenswrapper[4868]: 	container &Container{Name:dnsmasq-dns,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n599h5cbh7ch5d4h66fh676hdbh546h95h88h5ffh55ch7fhch57ch687hddhc7h5fdh57dh674h56fh64ch98h9bh557h55dh646h54ch54fh5c4h597q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-nb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-nb,SubPath:ovsdbserver-nb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ovsdbserver-sb,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/ovsdbserver-sb,SubPath:ovsdbserver-sb,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pmrst,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 5353 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-86db49b7ff-9hdst_openstack(c592801a-34fa-484e-bca5-31f2a53a64f7): CreateContainerError: container create failed: mount `/var/lib/kubelet/pods/c592801a-34fa-484e-bca5-31f2a53a64f7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory
Oct 03 13:09:03 crc kubenswrapper[4868]: > logger="UnhandledError"
Oct 03 13:09:03 crc kubenswrapper[4868]: E1003 13:09:03.908400 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"dnsmasq-dns\" with CreateContainerError: \"container create failed: mount `/var/lib/kubelet/pods/c592801a-34fa-484e-bca5-31f2a53a64f7/volume-subpaths/dns-svc/dnsmasq-dns/1` to `etc/dnsmasq.d/hosts/dns-svc`: No such file or directory\\n\"" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7"
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.261827 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:04 crc kubenswrapper[4868]: E1003 13:09:04.262535 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 03 13:09:04 crc kubenswrapper[4868]: E1003 13:09:04.262645 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 03 13:09:04 crc kubenswrapper[4868]: E1003 13:09:04.262810 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:05.262781396 +0000 UTC m=+1141.472630462 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.554981 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="316a0ee4-5745-4f86-9d0d-4730f8a5e78a" path="/var/lib/kubelet/pods/316a0ee4-5745-4f86-9d0d-4730f8a5e78a/volumes"
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.555499 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b33b7b6-6204-48fa-9dea-cf7d3127cd79" path="/var/lib/kubelet/pods/5b33b7b6-6204-48fa-9dea-cf7d3127cd79/volumes"
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.555978 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7fb365e-bac6-4259-a7d7-3fb743ed87a5" path="/var/lib/kubelet/pods/b7fb365e-bac6-4259-a7d7-3fb743ed87a5/volumes"
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.707766 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7rdpg" event={"ID":"c47f90b9-aeac-456b-b26f-0cde9556f32f","Type":"ContainerStarted","Data":"e642ea69b42011a7cc9d25ac76a279f4ee2f551567a078e3e121b409930a82ad"}
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.708129 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:09:04 crc kubenswrapper[4868]: I1003 13:09:04.733631 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podStartSLOduration=2.731669104 podStartE2EDuration="2.731669104s" podCreationTimestamp="2025-10-03 13:09:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:04.72854189 +0000 UTC m=+1140.938390976" watchObservedRunningTime="2025-10-03 13:09:04.731669104 +0000 UTC m=+1140.941518180"
Oct 03 13:09:05 crc kubenswrapper[4868]: I1003 13:09:05.277438 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:05 crc kubenswrapper[4868]: E1003 13:09:05.277784 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 03 13:09:05 crc kubenswrapper[4868]: E1003 13:09:05.278029 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 03 13:09:05 crc kubenswrapper[4868]: E1003 13:09:05.278124 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:07.278102054 +0000 UTC m=+1143.487951120 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found
Oct 03 13:09:05 crc kubenswrapper[4868]: I1003 13:09:05.716640 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" event={"ID":"c592801a-34fa-484e-bca5-31f2a53a64f7","Type":"ContainerStarted","Data":"9c567bae4e160d17076cb35dce4c0b046b18571a5b3c6904a58f2c039ff50d1b"}
Oct 03 13:09:05 crc kubenswrapper[4868]: I1003 13:09:05.716960 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst"
Oct 03 13:09:05 crc kubenswrapper[4868]: I1003 13:09:05.738481 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" podStartSLOduration=4.738461634 podStartE2EDuration="4.738461634s" podCreationTimestamp="2025-10-03 13:09:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:05.734234851 +0000 UTC m=+1141.944083917" watchObservedRunningTime="2025-10-03 13:09:05.738461634 +0000 UTC m=+1141.948310700"
Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.315410 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:07 crc kubenswrapper[4868]: E1003 13:09:07.315666 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 03 13:09:07 crc kubenswrapper[4868]: E1003 13:09:07.315708 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 03 13:09:07 crc kubenswrapper[4868]: E1003 13:09:07.315785 4868 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:11.315758968 +0000 UTC m=+1147.525608034 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.455836 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-p5w5j"] Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.457043 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.459298 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.459361 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.459518 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.466196 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-p5w5j"] Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519442 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519600 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qp7x6\" (UniqueName: \"kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519633 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519672 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519794 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 
13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519862 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.519897 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622224 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622317 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qp7x6\" (UniqueName: \"kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622347 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622386 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622456 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622528 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.622565 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.623158 4868 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.623656 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.623944 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.628265 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.628864 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.630035 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.645689 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qp7x6\" (UniqueName: \"kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6\") pod \"swift-ring-rebalance-p5w5j\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.777709 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-pwrfd" Oct 03 13:09:07 crc kubenswrapper[4868]: I1003 13:09:07.786591 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:08 crc kubenswrapper[4868]: I1003 13:09:08.226540 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-p5w5j"] Oct 03 13:09:08 crc kubenswrapper[4868]: W1003 13:09:08.237136 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod49f095a3_2d77_498e_bacc_3e6c711f4700.slice/crio-3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453 WatchSource:0}: Error finding container 3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453: Status 404 returned error can't find the container with id 3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453 Oct 03 13:09:08 crc kubenswrapper[4868]: I1003 13:09:08.747850 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p5w5j" event={"ID":"49f095a3-2d77-498e-bacc-3e6c711f4700","Type":"ContainerStarted","Data":"3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453"} Oct 03 13:09:09 crc kubenswrapper[4868]: I1003 13:09:09.821811 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Oct 03 13:09:09 crc kubenswrapper[4868]: I1003 13:09:09.822208 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Oct 03 13:09:09 crc kubenswrapper[4868]: I1003 13:09:09.901090 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0" Oct 03 13:09:09 crc kubenswrapper[4868]: I1003 13:09:09.901598 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0" Oct 03 13:09:10 crc kubenswrapper[4868]: I1003 13:09:10.774080 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Oct 03 13:09:10 crc kubenswrapper[4868]: I1003 13:09:10.866428 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.398925 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0" Oct 03 13:09:11 crc kubenswrapper[4868]: E1003 13:09:11.399204 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:09:11 crc kubenswrapper[4868]: E1003 13:09:11.399385 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:09:11 crc kubenswrapper[4868]: E1003 13:09:11.399465 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:19.399444009 +0000 UTC m=+1155.609293085 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.678259 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.779587 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"26ca4d65-6de7-490b-9492-c9fd70fe37b5","Type":"ContainerStarted","Data":"ff9e3ea6b9396583d232ce7c0778b8f9dd0134e368a4641f868dc2c278de4761"} Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.779654 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"26ca4d65-6de7-490b-9492-c9fd70fe37b5","Type":"ContainerStarted","Data":"b21b40352c41495ca3cba5a8c1ce3083dc7a3a9c61723638b29274b8b849536a"} Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.815669 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.992909281 podStartE2EDuration="10.815631157s" podCreationTimestamp="2025-10-03 13:09:01 +0000 UTC" firstStartedPulling="2025-10-03 13:09:02.959573042 +0000 UTC m=+1139.169422108" lastFinishedPulling="2025-10-03 13:09:10.782294918 +0000 UTC m=+1146.992143984" observedRunningTime="2025-10-03 13:09:11.806829091 +0000 UTC m=+1148.016678167" watchObservedRunningTime="2025-10-03 13:09:11.815631157 +0000 UTC m=+1148.025480223" Oct 03 13:09:11 crc kubenswrapper[4868]: I1003 13:09:11.964614 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0" Oct 03 13:09:12 crc kubenswrapper[4868]: I1003 13:09:12.015542 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0" Oct 03 13:09:12 crc kubenswrapper[4868]: I1003 13:09:12.257295 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0" Oct 03 13:09:12 crc kubenswrapper[4868]: I1003 13:09:12.878869 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-7rdpg" Oct 03 13:09:12 crc kubenswrapper[4868]: I1003 13:09:12.953660 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"] Oct 03 13:09:12 crc kubenswrapper[4868]: I1003 13:09:12.953961 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="dnsmasq-dns" containerID="cri-o://9c567bae4e160d17076cb35dce4c0b046b18571a5b3c6904a58f2c039ff50d1b" gracePeriod=10 Oct 03 13:09:13 crc kubenswrapper[4868]: I1003 13:09:13.795403 4868 generic.go:334] "Generic (PLEG): container finished" podID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerID="9c567bae4e160d17076cb35dce4c0b046b18571a5b3c6904a58f2c039ff50d1b" exitCode=0 Oct 03 13:09:13 crc kubenswrapper[4868]: I1003 13:09:13.796197 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" event={"ID":"c592801a-34fa-484e-bca5-31f2a53a64f7","Type":"ContainerDied","Data":"9c567bae4e160d17076cb35dce4c0b046b18571a5b3c6904a58f2c039ff50d1b"} Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.280288 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.380947 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb\") pod \"c592801a-34fa-484e-bca5-31f2a53a64f7\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.381102 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc\") pod \"c592801a-34fa-484e-bca5-31f2a53a64f7\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.381301 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb\") pod \"c592801a-34fa-484e-bca5-31f2a53a64f7\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.381371 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config\") pod \"c592801a-34fa-484e-bca5-31f2a53a64f7\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.381515 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pmrst\" (UniqueName: \"kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst\") pod \"c592801a-34fa-484e-bca5-31f2a53a64f7\" (UID: \"c592801a-34fa-484e-bca5-31f2a53a64f7\") " Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.389941 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst" (OuterVolumeSpecName: "kube-api-access-pmrst") pod "c592801a-34fa-484e-bca5-31f2a53a64f7" (UID: "c592801a-34fa-484e-bca5-31f2a53a64f7"). InnerVolumeSpecName "kube-api-access-pmrst". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.425282 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c592801a-34fa-484e-bca5-31f2a53a64f7" (UID: "c592801a-34fa-484e-bca5-31f2a53a64f7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.425381 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c592801a-34fa-484e-bca5-31f2a53a64f7" (UID: "c592801a-34fa-484e-bca5-31f2a53a64f7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.430604 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config" (OuterVolumeSpecName: "config") pod "c592801a-34fa-484e-bca5-31f2a53a64f7" (UID: "c592801a-34fa-484e-bca5-31f2a53a64f7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.431463 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c592801a-34fa-484e-bca5-31f2a53a64f7" (UID: "c592801a-34fa-484e-bca5-31f2a53a64f7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.483498 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.483530 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.483539 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.483547 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c592801a-34fa-484e-bca5-31f2a53a64f7-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.483557 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pmrst\" (UniqueName: \"kubernetes.io/projected/c592801a-34fa-484e-bca5-31f2a53a64f7-kube-api-access-pmrst\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.814453 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" event={"ID":"c592801a-34fa-484e-bca5-31f2a53a64f7","Type":"ContainerDied","Data":"48d9c1e8a5b950c2bb51017fe3ed9a21c671f26e81d7e426786ac7ffe4c04543"} Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.814521 4868 scope.go:117] "RemoveContainer" containerID="9c567bae4e160d17076cb35dce4c0b046b18571a5b3c6904a58f2c039ff50d1b" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.814515 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-9hdst" Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.859361 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"] Oct 03 13:09:15 crc kubenswrapper[4868]: I1003 13:09:15.865041 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-9hdst"] Oct 03 13:09:16 crc kubenswrapper[4868]: I1003 13:09:16.553610 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" path="/var/lib/kubelet/pods/c592801a-34fa-484e-bca5-31f2a53a64f7/volumes" Oct 03 13:09:19 crc kubenswrapper[4868]: I1003 13:09:19.453755 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0" Oct 03 13:09:19 crc kubenswrapper[4868]: E1003 13:09:19.454866 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Oct 03 13:09:19 crc kubenswrapper[4868]: E1003 13:09:19.455265 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Oct 03 13:09:19 crc kubenswrapper[4868]: E1003 13:09:19.455352 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:09:35.45533396 +0000 UTC m=+1171.665183026 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.143747 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-kvllh"] Oct 03 13:09:20 crc kubenswrapper[4868]: E1003 13:09:20.144245 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="dnsmasq-dns" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.144266 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="dnsmasq-dns" Oct 03 13:09:20 crc kubenswrapper[4868]: E1003 13:09:20.144285 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="init" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.144292 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="init" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.144501 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c592801a-34fa-484e-bca5-31f2a53a64f7" containerName="dnsmasq-dns" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.145328 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.156777 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kvllh"] Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.267761 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmgjv\" (UniqueName: \"kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv\") pod \"keystone-db-create-kvllh\" (UID: \"f27bd7bc-49d9-4848-ae93-24de591a246b\") " pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.369726 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmgjv\" (UniqueName: \"kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv\") pod \"keystone-db-create-kvllh\" (UID: \"f27bd7bc-49d9-4848-ae93-24de591a246b\") " pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.374429 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-pnblv"] Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.375968 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pnblv" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.387761 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pnblv"] Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.399436 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmgjv\" (UniqueName: \"kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv\") pod \"keystone-db-create-kvllh\" (UID: \"f27bd7bc-49d9-4848-ae93-24de591a246b\") " pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.466751 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.472390 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hqv6\" (UniqueName: \"kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6\") pod \"placement-db-create-pnblv\" (UID: \"14d9af9c-75b5-4fa2-a918-abb6814addd7\") " pod="openstack/placement-db-create-pnblv" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.575432 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hqv6\" (UniqueName: \"kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6\") pod \"placement-db-create-pnblv\" (UID: \"14d9af9c-75b5-4fa2-a918-abb6814addd7\") " pod="openstack/placement-db-create-pnblv" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.599597 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hqv6\" (UniqueName: \"kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6\") pod \"placement-db-create-pnblv\" (UID: \"14d9af9c-75b5-4fa2-a918-abb6814addd7\") " pod="openstack/placement-db-create-pnblv" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.674950 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-sw7ll"] Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.676610 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sw7ll" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.683761 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sw7ll"] Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.691667 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pnblv" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.778704 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6g29z\" (UniqueName: \"kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z\") pod \"glance-db-create-sw7ll\" (UID: \"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5\") " pod="openstack/glance-db-create-sw7ll" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.880560 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6g29z\" (UniqueName: \"kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z\") pod \"glance-db-create-sw7ll\" (UID: \"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5\") " pod="openstack/glance-db-create-sw7ll" Oct 03 13:09:20 crc kubenswrapper[4868]: I1003 13:09:20.901113 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6g29z\" (UniqueName: \"kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z\") pod \"glance-db-create-sw7ll\" (UID: \"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5\") " pod="openstack/glance-db-create-sw7ll" Oct 03 13:09:21 crc kubenswrapper[4868]: I1003 13:09:21.001783 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-sw7ll" Oct 03 13:09:22 crc kubenswrapper[4868]: I1003 13:09:22.324251 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Oct 03 13:09:26 crc kubenswrapper[4868]: I1003 13:09:26.473939 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-w4z7q" podUID="75727504-0a62-4459-add3-419d244f05ff" containerName="ovn-controller" probeResult="failure" output=< Oct 03 13:09:26 crc kubenswrapper[4868]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 13:09:26 crc kubenswrapper[4868]: > Oct 03 13:09:26 crc kubenswrapper[4868]: I1003 13:09:26.497102 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:09:30 crc kubenswrapper[4868]: I1003 13:09:30.972127 4868 generic.go:334] "Generic (PLEG): container finished" podID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerID="50e1b41735c10e406a5cb897fadf8e51579bbb8ae1b7b47347c387d79cde3bf1" exitCode=0 Oct 03 13:09:30 crc kubenswrapper[4868]: I1003 13:09:30.972272 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerDied","Data":"50e1b41735c10e406a5cb897fadf8e51579bbb8ae1b7b47347c387d79cde3bf1"} Oct 03 13:09:30 crc kubenswrapper[4868]: I1003 13:09:30.976814 4868 generic.go:334] "Generic (PLEG): container finished" podID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerID="625f04b3a97858bf53e28d9d473c9007bd6767b393dbbec4bff35073cc8d592f" exitCode=0 Oct 03 13:09:30 crc kubenswrapper[4868]: I1003 13:09:30.976859 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerDied","Data":"625f04b3a97858bf53e28d9d473c9007bd6767b393dbbec4bff35073cc8d592f"} Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.471114 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-w4z7q" podUID="75727504-0a62-4459-add3-419d244f05ff" containerName="ovn-controller" probeResult="failure" output=< Oct 03 13:09:31 crc kubenswrapper[4868]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Oct 03 13:09:31 crc kubenswrapper[4868]: > Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.489070 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-5lxjj" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.721648 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-w4z7q-config-btcvt"] Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.723428 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.729273 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q-config-btcvt"] Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.733600 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799129 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799197 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799226 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqz9p\" (UniqueName: \"kubernetes.io/projected/219fef08-4e62-4049-8747-ee4e22c88935-kube-api-access-pqz9p\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799636 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799787 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.799835 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.806426 4868 scope.go:117] "RemoveContainer" containerID="489da5ee8c8f6825e37f99e7035b212118d302e98f2c2a7d6d7f10a2c67c59e0" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.902610 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 
13:09:31.903043 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqz9p\" (UniqueName: \"kubernetes.io/projected/219fef08-4e62-4049-8747-ee4e22c88935-kube-api-access-pqz9p\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903165 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903204 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903221 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903287 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903620 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903679 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.903739 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.905558 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.907214 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:31 crc kubenswrapper[4868]: I1003 13:09:31.928501 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqz9p\" (UniqueName: \"kubernetes.io/projected/219fef08-4e62-4049-8747-ee4e22c88935-kube-api-access-pqz9p\") pod \"ovn-controller-w4z7q-config-btcvt\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") " pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.048600 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.145633 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.145703 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.418699 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-sw7ll"] Oct 03 13:09:32 crc kubenswrapper[4868]: W1003 13:09:32.438010 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod80b2d7b9_b29e_44a3_b55e_04e21bf1afd5.slice/crio-66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9 WatchSource:0}: Error finding container 66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9: Status 404 returned error can't find the container with id 66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9 Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.675530 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-kvllh"] Oct 03 13:09:32 crc kubenswrapper[4868]: W1003 13:09:32.691992 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf27bd7bc_49d9_4848_ae93_24de591a246b.slice/crio-badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5 WatchSource:0}: Error finding container badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5: Status 404 returned error can't find the container with id badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5 Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.697620 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-pnblv"] Oct 03 13:09:32 crc kubenswrapper[4868]: W1003 13:09:32.704584 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod14d9af9c_75b5_4fa2_a918_abb6814addd7.slice/crio-06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1 WatchSource:0}: Error finding 
container 06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1: Status 404 returned error can't find the container with id 06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1 Oct 03 13:09:32 crc kubenswrapper[4868]: I1003 13:09:32.892561 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q-config-btcvt"] Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.016527 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerStarted","Data":"feeccaadeb9938a2f31874ae5d739e7389c6c44f8e0be360c4b213c568c5afdd"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.017147 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.023077 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kvllh" event={"ID":"f27bd7bc-49d9-4848-ae93-24de591a246b","Type":"ContainerStarted","Data":"9c235725f63464c8c72ef58a773f7f874c74d65001a9ef80949b03391743402b"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.023140 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kvllh" event={"ID":"f27bd7bc-49d9-4848-ae93-24de591a246b","Type":"ContainerStarted","Data":"badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.025693 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p5w5j" event={"ID":"49f095a3-2d77-498e-bacc-3e6c711f4700","Type":"ContainerStarted","Data":"d12cf923daec3166051032d778a096728df2629aea738fe123bdbe8f183cdb42"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.027570 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-btcvt" event={"ID":"219fef08-4e62-4049-8747-ee4e22c88935","Type":"ContainerStarted","Data":"512d9fa09e1cc711031a3a6fbfd152f2d5228ed5a737b861597f1da905606247"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.032792 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pnblv" event={"ID":"14d9af9c-75b5-4fa2-a918-abb6814addd7","Type":"ContainerStarted","Data":"676bc9d954942f888b57a60c03aee1c2f170d78fca36a66e22f1f87eb8b1518b"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.032822 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pnblv" event={"ID":"14d9af9c-75b5-4fa2-a918-abb6814addd7","Type":"ContainerStarted","Data":"06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.040861 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerStarted","Data":"c7c70b0b002d4d2030b948ed263438815e14f295e91b125c61af989ec76d1fbf"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.041123 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.049309 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=61.03842078 podStartE2EDuration="1m8.049288042s" podCreationTimestamp="2025-10-03 13:08:25 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.307140888 +0000 UTC 
m=+1123.516989954" lastFinishedPulling="2025-10-03 13:08:54.31800815 +0000 UTC m=+1130.527857216" observedRunningTime="2025-10-03 13:09:33.0424755 +0000 UTC m=+1169.252324556" watchObservedRunningTime="2025-10-03 13:09:33.049288042 +0000 UTC m=+1169.259137108" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.060688 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sw7ll" event={"ID":"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5","Type":"ContainerStarted","Data":"4270681f5a62c3977527854fb986e54c3412d21903f0c3037684d75ec4b3e5c1"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.060750 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sw7ll" event={"ID":"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5","Type":"ContainerStarted","Data":"66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9"} Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.090886 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-kvllh" podStartSLOduration=13.090860824 podStartE2EDuration="13.090860824s" podCreationTimestamp="2025-10-03 13:09:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:33.070700144 +0000 UTC m=+1169.280549210" watchObservedRunningTime="2025-10-03 13:09:33.090860824 +0000 UTC m=+1169.300709880" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.092285 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-p5w5j" podStartSLOduration=2.28118612 podStartE2EDuration="26.092277512s" podCreationTimestamp="2025-10-03 13:09:07 +0000 UTC" firstStartedPulling="2025-10-03 13:09:08.239459606 +0000 UTC m=+1144.449308672" lastFinishedPulling="2025-10-03 13:09:32.050550998 +0000 UTC m=+1168.260400064" observedRunningTime="2025-10-03 13:09:33.089691452 +0000 UTC m=+1169.299540518" watchObservedRunningTime="2025-10-03 13:09:33.092277512 +0000 UTC m=+1169.302126578" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.125591 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=60.610442546 podStartE2EDuration="1m8.125567951s" podCreationTimestamp="2025-10-03 13:08:25 +0000 UTC" firstStartedPulling="2025-10-03 13:08:47.329385043 +0000 UTC m=+1123.539234109" lastFinishedPulling="2025-10-03 13:08:54.844510458 +0000 UTC m=+1131.054359514" observedRunningTime="2025-10-03 13:09:33.119115869 +0000 UTC m=+1169.328964965" watchObservedRunningTime="2025-10-03 13:09:33.125567951 +0000 UTC m=+1169.335417007" Oct 03 13:09:33 crc kubenswrapper[4868]: I1003 13:09:33.143690 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-create-pnblv" podStartSLOduration=13.143659225 podStartE2EDuration="13.143659225s" podCreationTimestamp="2025-10-03 13:09:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:33.138384455 +0000 UTC m=+1169.348233521" watchObservedRunningTime="2025-10-03 13:09:33.143659225 +0000 UTC m=+1169.353508291" Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.073028 4868 generic.go:334] "Generic (PLEG): container finished" podID="80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" containerID="4270681f5a62c3977527854fb986e54c3412d21903f0c3037684d75ec4b3e5c1" exitCode=0 Oct 03 13:09:34 crc 
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.073093 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sw7ll" event={"ID":"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5","Type":"ContainerDied","Data":"4270681f5a62c3977527854fb986e54c3412d21903f0c3037684d75ec4b3e5c1"}
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.077551 4868 generic.go:334] "Generic (PLEG): container finished" podID="f27bd7bc-49d9-4848-ae93-24de591a246b" containerID="9c235725f63464c8c72ef58a773f7f874c74d65001a9ef80949b03391743402b" exitCode=0
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.077660 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kvllh" event={"ID":"f27bd7bc-49d9-4848-ae93-24de591a246b","Type":"ContainerDied","Data":"9c235725f63464c8c72ef58a773f7f874c74d65001a9ef80949b03391743402b"}
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.080360 4868 generic.go:334] "Generic (PLEG): container finished" podID="219fef08-4e62-4049-8747-ee4e22c88935" containerID="2dc4f072b7830febd41ea7cbe7eca7d27c1121467e46a41fbf3027d374619a3d" exitCode=0
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.080419 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-btcvt" event={"ID":"219fef08-4e62-4049-8747-ee4e22c88935","Type":"ContainerDied","Data":"2dc4f072b7830febd41ea7cbe7eca7d27c1121467e46a41fbf3027d374619a3d"}
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.087883 4868 generic.go:334] "Generic (PLEG): container finished" podID="14d9af9c-75b5-4fa2-a918-abb6814addd7" containerID="676bc9d954942f888b57a60c03aee1c2f170d78fca36a66e22f1f87eb8b1518b" exitCode=0
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.088098 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pnblv" event={"ID":"14d9af9c-75b5-4fa2-a918-abb6814addd7","Type":"ContainerDied","Data":"676bc9d954942f888b57a60c03aee1c2f170d78fca36a66e22f1f87eb8b1518b"}
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.470517 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sw7ll"
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.585606 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g29z\" (UniqueName: \"kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z\") pod \"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5\" (UID: \"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5\") "
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.594403 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z" (OuterVolumeSpecName: "kube-api-access-6g29z") pod "80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" (UID: "80b2d7b9-b29e-44a3-b55e-04e21bf1afd5"). InnerVolumeSpecName "kube-api-access-6g29z". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:09:34 crc kubenswrapper[4868]: I1003 13:09:34.688847 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g29z\" (UniqueName: \"kubernetes.io/projected/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5-kube-api-access-6g29z\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.099817 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-sw7ll" event={"ID":"80b2d7b9-b29e-44a3-b55e-04e21bf1afd5","Type":"ContainerDied","Data":"66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9"}
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.099879 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66d4d5e5d22414d017c85b757c98aa0dd5615d7012d72b8fcfe3c7aafb0433d9"
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.099843 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-sw7ll"
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.458104 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-btcvt"
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.502074 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.502868 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.502949 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503007 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503092 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503133 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pqz9p\" (UniqueName: \"kubernetes.io/projected/219fef08-4e62-4049-8747-ee4e22c88935-kube-api-access-pqz9p\") pod \"219fef08-4e62-4049-8747-ee4e22c88935\" (UID: \"219fef08-4e62-4049-8747-ee4e22c88935\") "
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503424 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503628 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts" (OuterVolumeSpecName: "scripts") pod "219fef08-4e62-4049-8747-ee4e22c88935" (UID: "219fef08-4e62-4049-8747-ee4e22c88935"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503682 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "219fef08-4e62-4049-8747-ee4e22c88935" (UID: "219fef08-4e62-4049-8747-ee4e22c88935"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503703 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "219fef08-4e62-4049-8747-ee4e22c88935" (UID: "219fef08-4e62-4049-8747-ee4e22c88935"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503725 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run" (OuterVolumeSpecName: "var-run") pod "219fef08-4e62-4049-8747-ee4e22c88935" (UID: "219fef08-4e62-4049-8747-ee4e22c88935"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.503998 4868 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run-ovn\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.504019 4868 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-run\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.504031 4868 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/219fef08-4e62-4049-8747-ee4e22c88935-var-log-ovn\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.504039 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:09:35 crc kubenswrapper[4868]: E1003 13:09:35.504208 4868 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Oct 03 13:09:35 crc kubenswrapper[4868]: E1003 13:09:35.504263 4868 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.504351 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "219fef08-4e62-4049-8747-ee4e22c88935" (UID: "219fef08-4e62-4049-8747-ee4e22c88935"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:09:35 crc kubenswrapper[4868]: E1003 13:09:35.504375 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift podName:bb0842a9-9947-4561-af16-154496b90622 nodeName:}" failed. No retries permitted until 2025-10-03 13:10:07.504320745 +0000 UTC m=+1203.714169811 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift") pod "swift-storage-0" (UID: "bb0842a9-9947-4561-af16-154496b90622") : configmap "swift-ring-files" not found
Need to start a new one" pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.604438 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mmgjv\" (UniqueName: \"kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv\") pod \"f27bd7bc-49d9-4848-ae93-24de591a246b\" (UID: \"f27bd7bc-49d9-4848-ae93-24de591a246b\") " Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.604587 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hqv6\" (UniqueName: \"kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6\") pod \"14d9af9c-75b5-4fa2-a918-abb6814addd7\" (UID: \"14d9af9c-75b5-4fa2-a918-abb6814addd7\") " Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.604983 4868 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/219fef08-4e62-4049-8747-ee4e22c88935-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.605009 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pqz9p\" (UniqueName: \"kubernetes.io/projected/219fef08-4e62-4049-8747-ee4e22c88935-kube-api-access-pqz9p\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.608797 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv" (OuterVolumeSpecName: "kube-api-access-mmgjv") pod "f27bd7bc-49d9-4848-ae93-24de591a246b" (UID: "f27bd7bc-49d9-4848-ae93-24de591a246b"). InnerVolumeSpecName "kube-api-access-mmgjv". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.615215 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6" (OuterVolumeSpecName: "kube-api-access-8hqv6") pod "14d9af9c-75b5-4fa2-a918-abb6814addd7" (UID: "14d9af9c-75b5-4fa2-a918-abb6814addd7"). InnerVolumeSpecName "kube-api-access-8hqv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.706540 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hqv6\" (UniqueName: \"kubernetes.io/projected/14d9af9c-75b5-4fa2-a918-abb6814addd7-kube-api-access-8hqv6\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:35 crc kubenswrapper[4868]: I1003 13:09:35.706585 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mmgjv\" (UniqueName: \"kubernetes.io/projected/f27bd7bc-49d9-4848-ae93-24de591a246b-kube-api-access-mmgjv\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.109731 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-btcvt" event={"ID":"219fef08-4e62-4049-8747-ee4e22c88935","Type":"ContainerDied","Data":"512d9fa09e1cc711031a3a6fbfd152f2d5228ed5a737b861597f1da905606247"} Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.109776 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="512d9fa09e1cc711031a3a6fbfd152f2d5228ed5a737b861597f1da905606247" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.109784 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-btcvt" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.111320 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-pnblv" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.111336 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-pnblv" event={"ID":"14d9af9c-75b5-4fa2-a918-abb6814addd7","Type":"ContainerDied","Data":"06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1"} Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.111391 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="06fdf6761412a103bf30920c8462ad6d6e6e31ab56f511a7ec19b9354eee0bc1" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.112532 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-kvllh" event={"ID":"f27bd7bc-49d9-4848-ae93-24de591a246b","Type":"ContainerDied","Data":"badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5"} Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.112554 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="badf580889bb21ebb286003a0a2a0305d1f88f0ea924686ee826397ef26c3bc5" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.112615 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-kvllh" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.471376 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-w4z7q" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.559396 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-w4z7q-config-btcvt"] Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.564868 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-w4z7q-config-btcvt"] Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.661543 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-w4z7q-config-2qql9"] Oct 03 13:09:36 crc kubenswrapper[4868]: E1003 13:09:36.661875 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="14d9af9c-75b5-4fa2-a918-abb6814addd7" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.661892 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="14d9af9c-75b5-4fa2-a918-abb6814addd7" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: E1003 13:09:36.661899 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.661906 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: E1003 13:09:36.661924 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="219fef08-4e62-4049-8747-ee4e22c88935" containerName="ovn-config" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.661930 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="219fef08-4e62-4049-8747-ee4e22c88935" containerName="ovn-config" Oct 03 13:09:36 crc kubenswrapper[4868]: E1003 13:09:36.661948 4868 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f27bd7bc-49d9-4848-ae93-24de591a246b" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.661953 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f27bd7bc-49d9-4848-ae93-24de591a246b" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.662128 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f27bd7bc-49d9-4848-ae93-24de591a246b" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.662141 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="14d9af9c-75b5-4fa2-a918-abb6814addd7" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.662152 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="219fef08-4e62-4049-8747-ee4e22c88935" containerName="ovn-config" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.662168 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" containerName="mariadb-database-create" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.662745 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.665389 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.686500 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q-config-2qql9"] Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731110 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731196 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731238 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731284 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731450 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: 
\"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.731491 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6l86z\" (UniqueName: \"kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833141 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833631 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833661 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833696 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833867 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.833898 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6l86z\" (UniqueName: \"kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.834740 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.834819 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.837332 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.837401 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.837873 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.861082 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6l86z\" (UniqueName: \"kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z\") pod \"ovn-controller-w4z7q-config-2qql9\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:36 crc kubenswrapper[4868]: I1003 13:09:36.979022 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:37 crc kubenswrapper[4868]: I1003 13:09:37.415571 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-w4z7q-config-2qql9"] Oct 03 13:09:37 crc kubenswrapper[4868]: W1003 13:09:37.424284 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5c02ec41_ca92_4486_8062_2f5e380351c2.slice/crio-4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878 WatchSource:0}: Error finding container 4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878: Status 404 returned error can't find the container with id 4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878 Oct 03 13:09:38 crc kubenswrapper[4868]: I1003 13:09:38.130942 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-2qql9" event={"ID":"5c02ec41-ca92-4486-8062-2f5e380351c2","Type":"ContainerStarted","Data":"b0f2a21f37f5b3b3bdbb8507a708023799e11718cac95370703e46492653ed9a"} Oct 03 13:09:38 crc kubenswrapper[4868]: I1003 13:09:38.131342 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-2qql9" event={"ID":"5c02ec41-ca92-4486-8062-2f5e380351c2","Type":"ContainerStarted","Data":"4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878"} Oct 03 13:09:38 crc kubenswrapper[4868]: I1003 13:09:38.554950 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="219fef08-4e62-4049-8747-ee4e22c88935" path="/var/lib/kubelet/pods/219fef08-4e62-4049-8747-ee4e22c88935/volumes" Oct 03 13:09:39 crc kubenswrapper[4868]: I1003 13:09:39.148592 4868 generic.go:334] "Generic (PLEG): container finished" podID="5c02ec41-ca92-4486-8062-2f5e380351c2" containerID="b0f2a21f37f5b3b3bdbb8507a708023799e11718cac95370703e46492653ed9a" exitCode=0 Oct 03 13:09:39 crc kubenswrapper[4868]: I1003 13:09:39.148667 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-2qql9" event={"ID":"5c02ec41-ca92-4486-8062-2f5e380351c2","Type":"ContainerDied","Data":"b0f2a21f37f5b3b3bdbb8507a708023799e11718cac95370703e46492653ed9a"} Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.470824 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.522951 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-0b54-account-create-zkj92"] Oct 03 13:09:40 crc kubenswrapper[4868]: E1003 13:09:40.523408 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c02ec41-ca92-4486-8062-2f5e380351c2" containerName="ovn-config" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.523429 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c02ec41-ca92-4486-8062-2f5e380351c2" containerName="ovn-config" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.524803 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c02ec41-ca92-4486-8062-2f5e380351c2" containerName="ovn-config" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.525913 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.547377 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.582528 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0b54-account-create-zkj92"] Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.610922 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611009 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611029 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611094 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611189 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611218 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6l86z\" (UniqueName: \"kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z\") pod \"5c02ec41-ca92-4486-8062-2f5e380351c2\" (UID: \"5c02ec41-ca92-4486-8062-2f5e380351c2\") " Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611452 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8k4t9\" (UniqueName: \"kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9\") pod \"placement-0b54-account-create-zkj92\" (UID: \"e1ef9769-358f-4f0e-850f-3da4ffd5637b\") " pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.611557 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run" (OuterVolumeSpecName: "var-run") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.612308 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.612341 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.612357 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.613033 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts" (OuterVolumeSpecName: "scripts") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.619289 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z" (OuterVolumeSpecName: "kube-api-access-6l86z") pod "5c02ec41-ca92-4486-8062-2f5e380351c2" (UID: "5c02ec41-ca92-4486-8062-2f5e380351c2"). InnerVolumeSpecName "kube-api-access-6l86z". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712628 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8k4t9\" (UniqueName: \"kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9\") pod \"placement-0b54-account-create-zkj92\" (UID: \"e1ef9769-358f-4f0e-850f-3da4ffd5637b\") " pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712718 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712732 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6l86z\" (UniqueName: \"kubernetes.io/projected/5c02ec41-ca92-4486-8062-2f5e380351c2-kube-api-access-6l86z\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712743 4868 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712755 4868 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/5c02ec41-ca92-4486-8062-2f5e380351c2-additional-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712766 4868 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-log-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.712778 4868 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/5c02ec41-ca92-4486-8062-2f5e380351c2-var-run-ovn\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.730105 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8k4t9\" (UniqueName: \"kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9\") pod \"placement-0b54-account-create-zkj92\" (UID: \"e1ef9769-358f-4f0e-850f-3da4ffd5637b\") " pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.829244 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-73a0-account-create-llxsx"] Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.830418 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.833986 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.837094 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-73a0-account-create-llxsx"] Oct 03 13:09:40 crc kubenswrapper[4868]: I1003 13:09:40.857526 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.017190 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znswk\" (UniqueName: \"kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk\") pod \"glance-73a0-account-create-llxsx\" (UID: \"3ea96de3-ce21-44c7-8447-9d85172f25e4\") " pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.119516 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-0b54-account-create-zkj92"] Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.119665 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znswk\" (UniqueName: \"kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk\") pod \"glance-73a0-account-create-llxsx\" (UID: \"3ea96de3-ce21-44c7-8447-9d85172f25e4\") " pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:41 crc kubenswrapper[4868]: W1003 13:09:41.124074 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode1ef9769_358f_4f0e_850f_3da4ffd5637b.slice/crio-1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331 WatchSource:0}: Error finding container 1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331: Status 404 returned error can't find the container with id 1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331 Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.139031 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znswk\" (UniqueName: \"kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk\") pod \"glance-73a0-account-create-llxsx\" (UID: \"3ea96de3-ce21-44c7-8447-9d85172f25e4\") " pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.167648 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0b54-account-create-zkj92" event={"ID":"e1ef9769-358f-4f0e-850f-3da4ffd5637b","Type":"ContainerStarted","Data":"1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331"} Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.171601 4868 generic.go:334] "Generic (PLEG): container finished" podID="49f095a3-2d77-498e-bacc-3e6c711f4700" containerID="d12cf923daec3166051032d778a096728df2629aea738fe123bdbe8f183cdb42" exitCode=0 Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.171697 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p5w5j" event={"ID":"49f095a3-2d77-498e-bacc-3e6c711f4700","Type":"ContainerDied","Data":"d12cf923daec3166051032d778a096728df2629aea738fe123bdbe8f183cdb42"} Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.173801 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-w4z7q-config-2qql9" event={"ID":"5c02ec41-ca92-4486-8062-2f5e380351c2","Type":"ContainerDied","Data":"4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878"} Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.173830 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4d70fcc783cc6d6e2a19cafd701ddbcfc5296519e7a850ab5d11446717087878" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.173876 4868 util.go:48] "No ready sandbox 
for pod can be found. Need to start a new one" pod="openstack/ovn-controller-w4z7q-config-2qql9" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.241133 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.534614 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-w4z7q-config-2qql9"] Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.544271 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-w4z7q-config-2qql9"] Oct 03 13:09:41 crc kubenswrapper[4868]: I1003 13:09:41.664472 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-73a0-account-create-llxsx"] Oct 03 13:09:41 crc kubenswrapper[4868]: W1003 13:09:41.668308 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ea96de3_ce21_44c7_8447_9d85172f25e4.slice/crio-8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73 WatchSource:0}: Error finding container 8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73: Status 404 returned error can't find the container with id 8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73 Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.183044 4868 generic.go:334] "Generic (PLEG): container finished" podID="e1ef9769-358f-4f0e-850f-3da4ffd5637b" containerID="f63d7b32ca95d3eb702a9fc2e56ed660573604f0034fec42aff53ef7382831a3" exitCode=0 Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.183162 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0b54-account-create-zkj92" event={"ID":"e1ef9769-358f-4f0e-850f-3da4ffd5637b","Type":"ContainerDied","Data":"f63d7b32ca95d3eb702a9fc2e56ed660573604f0034fec42aff53ef7382831a3"} Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.184902 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-73a0-account-create-llxsx" event={"ID":"3ea96de3-ce21-44c7-8447-9d85172f25e4","Type":"ContainerStarted","Data":"788a2a71d8590da76a07322415d4fd884637e3a1f2b1542a7244770414c5169d"} Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.184994 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-73a0-account-create-llxsx" event={"ID":"3ea96de3-ce21-44c7-8447-9d85172f25e4","Type":"ContainerStarted","Data":"8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73"} Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.225023 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-73a0-account-create-llxsx" podStartSLOduration=2.224996124 podStartE2EDuration="2.224996124s" podCreationTimestamp="2025-10-03 13:09:40 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:09:42.217931555 +0000 UTC m=+1178.427780621" watchObservedRunningTime="2025-10-03 13:09:42.224996124 +0000 UTC m=+1178.434845190" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.480731 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.564569 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c02ec41-ca92-4486-8062-2f5e380351c2" path="/var/lib/kubelet/pods/5c02ec41-ca92-4486-8062-2f5e380351c2/volumes" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.648106 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.648488 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.648550 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649142 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649201 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qp7x6\" (UniqueName: \"kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649250 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649288 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649329 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts\") pod \"49f095a3-2d77-498e-bacc-3e6c711f4700\" (UID: \"49f095a3-2d77-498e-bacc-3e6c711f4700\") " Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.649898 4868 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-ring-data-devices\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 
13:09:42.650136 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.663772 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.676023 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6" (OuterVolumeSpecName: "kube-api-access-qp7x6") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "kube-api-access-qp7x6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.682428 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.685727 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.686596 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts" (OuterVolumeSpecName: "scripts") pod "49f095a3-2d77-498e-bacc-3e6c711f4700" (UID: "49f095a3-2d77-498e-bacc-3e6c711f4700"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750873 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750907 4868 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-swiftconf\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750918 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qp7x6\" (UniqueName: \"kubernetes.io/projected/49f095a3-2d77-498e-bacc-3e6c711f4700-kube-api-access-qp7x6\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750928 4868 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/49f095a3-2d77-498e-bacc-3e6c711f4700-etc-swift\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750936 4868 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/49f095a3-2d77-498e-bacc-3e6c711f4700-dispersionconf\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:42 crc kubenswrapper[4868]: I1003 13:09:42.750943 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/49f095a3-2d77-498e-bacc-3e6c711f4700-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.193861 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-p5w5j" event={"ID":"49f095a3-2d77-498e-bacc-3e6c711f4700","Type":"ContainerDied","Data":"3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453"} Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.193930 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-p5w5j" Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.193960 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3d5eae1aefb3874bfe2d333209bc42134f0a67f761d437ac2c59fef76e169453" Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.514135 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.666557 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8k4t9\" (UniqueName: \"kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9\") pod \"e1ef9769-358f-4f0e-850f-3da4ffd5637b\" (UID: \"e1ef9769-358f-4f0e-850f-3da4ffd5637b\") " Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.673258 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9" (OuterVolumeSpecName: "kube-api-access-8k4t9") pod "e1ef9769-358f-4f0e-850f-3da4ffd5637b" (UID: "e1ef9769-358f-4f0e-850f-3da4ffd5637b"). InnerVolumeSpecName "kube-api-access-8k4t9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:43 crc kubenswrapper[4868]: I1003 13:09:43.769566 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8k4t9\" (UniqueName: \"kubernetes.io/projected/e1ef9769-358f-4f0e-850f-3da4ffd5637b-kube-api-access-8k4t9\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:44 crc kubenswrapper[4868]: I1003 13:09:44.203166 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-0b54-account-create-zkj92" Oct 03 13:09:44 crc kubenswrapper[4868]: I1003 13:09:44.203151 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-0b54-account-create-zkj92" event={"ID":"e1ef9769-358f-4f0e-850f-3da4ffd5637b","Type":"ContainerDied","Data":"1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331"} Oct 03 13:09:44 crc kubenswrapper[4868]: I1003 13:09:44.203308 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1a0fa7723685d977d84500aba8392f209f5d61952cb41221f3dd9582f1f1e331" Oct 03 13:09:44 crc kubenswrapper[4868]: I1003 13:09:44.206206 4868 generic.go:334] "Generic (PLEG): container finished" podID="3ea96de3-ce21-44c7-8447-9d85172f25e4" containerID="788a2a71d8590da76a07322415d4fd884637e3a1f2b1542a7244770414c5169d" exitCode=0 Oct 03 13:09:44 crc kubenswrapper[4868]: I1003 13:09:44.206244 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-73a0-account-create-llxsx" event={"ID":"3ea96de3-ce21-44c7-8447-9d85172f25e4","Type":"ContainerDied","Data":"788a2a71d8590da76a07322415d4fd884637e3a1f2b1542a7244770414c5169d"} Oct 03 13:09:45 crc kubenswrapper[4868]: I1003 13:09:45.501670 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:45 crc kubenswrapper[4868]: I1003 13:09:45.699147 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znswk\" (UniqueName: \"kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk\") pod \"3ea96de3-ce21-44c7-8447-9d85172f25e4\" (UID: \"3ea96de3-ce21-44c7-8447-9d85172f25e4\") " Oct 03 13:09:45 crc kubenswrapper[4868]: I1003 13:09:45.705593 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk" (OuterVolumeSpecName: "kube-api-access-znswk") pod "3ea96de3-ce21-44c7-8447-9d85172f25e4" (UID: "3ea96de3-ce21-44c7-8447-9d85172f25e4"). InnerVolumeSpecName "kube-api-access-znswk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:45 crc kubenswrapper[4868]: I1003 13:09:45.801461 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znswk\" (UniqueName: \"kubernetes.io/projected/3ea96de3-ce21-44c7-8447-9d85172f25e4-kube-api-access-znswk\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:46 crc kubenswrapper[4868]: I1003 13:09:46.222780 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-73a0-account-create-llxsx" event={"ID":"3ea96de3-ce21-44c7-8447-9d85172f25e4","Type":"ContainerDied","Data":"8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73"} Oct 03 13:09:46 crc kubenswrapper[4868]: I1003 13:09:46.222833 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8611d4a4ced4852ed62eb512be9d9586c5a8784c7083c29e2c7223c57df96d73" Oct 03 13:09:46 crc kubenswrapper[4868]: I1003 13:09:46.223274 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-73a0-account-create-llxsx" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.036255 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.096262 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.418409 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-qkmwv"] Oct 03 13:09:47 crc kubenswrapper[4868]: E1003 13:09:47.418856 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e1ef9769-358f-4f0e-850f-3da4ffd5637b" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.418880 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e1ef9769-358f-4f0e-850f-3da4ffd5637b" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: E1003 13:09:47.418901 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="49f095a3-2d77-498e-bacc-3e6c711f4700" containerName="swift-ring-rebalance" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.418909 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="49f095a3-2d77-498e-bacc-3e6c711f4700" containerName="swift-ring-rebalance" Oct 03 13:09:47 crc kubenswrapper[4868]: E1003 13:09:47.418933 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3ea96de3-ce21-44c7-8447-9d85172f25e4" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.418941 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3ea96de3-ce21-44c7-8447-9d85172f25e4" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.419177 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e1ef9769-358f-4f0e-850f-3da4ffd5637b" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.419195 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="49f095a3-2d77-498e-bacc-3e6c711f4700" containerName="swift-ring-rebalance" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.419208 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="3ea96de3-ce21-44c7-8447-9d85172f25e4" containerName="mariadb-account-create" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.419910 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.430512 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qkmwv"] Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.458182 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zft5g\" (UniqueName: \"kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g\") pod \"cinder-db-create-qkmwv\" (UID: \"7d01dd09-a4c0-4798-aacf-9f601f0f3502\") " pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.506651 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-xkslz"] Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.507808 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.529472 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xkslz"] Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.559950 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zft5g\" (UniqueName: \"kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g\") pod \"cinder-db-create-qkmwv\" (UID: \"7d01dd09-a4c0-4798-aacf-9f601f0f3502\") " pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.587108 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zft5g\" (UniqueName: \"kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g\") pod \"cinder-db-create-qkmwv\" (UID: \"7d01dd09-a4c0-4798-aacf-9f601f0f3502\") " pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.661915 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blzgw\" (UniqueName: \"kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw\") pod \"barbican-db-create-xkslz\" (UID: \"57f54345-335f-4bda-99a4-0e25bcb84c69\") " pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.709632 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-vw4nf"] Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.711341 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.720882 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vw4nf"] Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.741520 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.763615 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blzgw\" (UniqueName: \"kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw\") pod \"barbican-db-create-xkslz\" (UID: \"57f54345-335f-4bda-99a4-0e25bcb84c69\") " pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.785005 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blzgw\" (UniqueName: \"kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw\") pod \"barbican-db-create-xkslz\" (UID: \"57f54345-335f-4bda-99a4-0e25bcb84c69\") " pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.823969 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.866197 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbh9v\" (UniqueName: \"kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v\") pod \"neutron-db-create-vw4nf\" (UID: \"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe\") " pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.968048 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbh9v\" (UniqueName: \"kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v\") pod \"neutron-db-create-vw4nf\" (UID: \"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe\") " pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:47 crc kubenswrapper[4868]: I1003 13:09:47.986652 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbh9v\" (UniqueName: \"kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v\") pod \"neutron-db-create-vw4nf\" (UID: \"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe\") " pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.032550 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-qkmwv"] Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.037530 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:48 crc kubenswrapper[4868]: W1003 13:09:48.038195 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d01dd09_a4c0_4798_aacf_9f601f0f3502.slice/crio-cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662 WatchSource:0}: Error finding container cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662: Status 404 returned error can't find the container with id cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662 Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.100167 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-xkslz"] Oct 03 13:09:48 crc kubenswrapper[4868]: W1003 13:09:48.143734 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod57f54345_335f_4bda_99a4_0e25bcb84c69.slice/crio-f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf WatchSource:0}: Error finding container f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf: Status 404 returned error can't find the container with id f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.253891 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qkmwv" event={"ID":"7d01dd09-a4c0-4798-aacf-9f601f0f3502","Type":"ContainerStarted","Data":"cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662"} Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.255328 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xkslz" event={"ID":"57f54345-335f-4bda-99a4-0e25bcb84c69","Type":"ContainerStarted","Data":"f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf"} Oct 03 13:09:48 crc kubenswrapper[4868]: I1003 13:09:48.513089 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-vw4nf"] Oct 03 13:09:48 crc kubenswrapper[4868]: W1003 13:09:48.520585 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod113709cc_e6ba_4cb5_9eb4_7ddc62f39afe.slice/crio-bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16 WatchSource:0}: Error finding container bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16: Status 404 returned error can't find the container with id bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16 Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.265260 4868 generic.go:334] "Generic (PLEG): container finished" podID="113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" containerID="01820ede449f37ae3b5742b5d25988d42207e4e62a973ab05c7718e1b4ca9869" exitCode=0 Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.265338 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vw4nf" event={"ID":"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe","Type":"ContainerDied","Data":"01820ede449f37ae3b5742b5d25988d42207e4e62a973ab05c7718e1b4ca9869"} Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.265646 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vw4nf" event={"ID":"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe","Type":"ContainerStarted","Data":"bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16"} Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 
13:09:49.267266 4868 generic.go:334] "Generic (PLEG): container finished" podID="7d01dd09-a4c0-4798-aacf-9f601f0f3502" containerID="02d223423f833ba2e36f0f7266148a3addd92dab30eccb7a6abe0a3b9b3be0aa" exitCode=0 Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.267361 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qkmwv" event={"ID":"7d01dd09-a4c0-4798-aacf-9f601f0f3502","Type":"ContainerDied","Data":"02d223423f833ba2e36f0f7266148a3addd92dab30eccb7a6abe0a3b9b3be0aa"} Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.269515 4868 generic.go:334] "Generic (PLEG): container finished" podID="57f54345-335f-4bda-99a4-0e25bcb84c69" containerID="0ad2d560b98f1f990748228d9fd8fa1fc2c0ac53476fe39799fd8b004fc2f092" exitCode=0 Oct 03 13:09:49 crc kubenswrapper[4868]: I1003 13:09:49.269548 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xkslz" event={"ID":"57f54345-335f-4bda-99a4-0e25bcb84c69","Type":"ContainerDied","Data":"0ad2d560b98f1f990748228d9fd8fa1fc2c0ac53476fe39799fd8b004fc2f092"} Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.135850 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-6472-account-create-hg6f7"] Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.137533 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.141428 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.143960 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6472-account-create-hg6f7"] Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.209265 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45pxl\" (UniqueName: \"kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl\") pod \"keystone-6472-account-create-hg6f7\" (UID: \"8c65bc31-bfff-4894-855e-e411deea1819\") " pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.310434 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45pxl\" (UniqueName: \"kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl\") pod \"keystone-6472-account-create-hg6f7\" (UID: \"8c65bc31-bfff-4894-855e-e411deea1819\") " pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.341220 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45pxl\" (UniqueName: \"kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl\") pod \"keystone-6472-account-create-hg6f7\" (UID: \"8c65bc31-bfff-4894-855e-e411deea1819\") " pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.460297 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.700715 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.719906 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.721090 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.819959 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbh9v\" (UniqueName: \"kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v\") pod \"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe\" (UID: \"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe\") " Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.827041 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v" (OuterVolumeSpecName: "kube-api-access-cbh9v") pod "113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" (UID: "113709cc-e6ba-4cb5-9eb4-7ddc62f39afe"). InnerVolumeSpecName "kube-api-access-cbh9v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.922137 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zft5g\" (UniqueName: \"kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g\") pod \"7d01dd09-a4c0-4798-aacf-9f601f0f3502\" (UID: \"7d01dd09-a4c0-4798-aacf-9f601f0f3502\") " Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.922702 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blzgw\" (UniqueName: \"kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw\") pod \"57f54345-335f-4bda-99a4-0e25bcb84c69\" (UID: \"57f54345-335f-4bda-99a4-0e25bcb84c69\") " Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.923286 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbh9v\" (UniqueName: \"kubernetes.io/projected/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe-kube-api-access-cbh9v\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.926001 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g" (OuterVolumeSpecName: "kube-api-access-zft5g") pod "7d01dd09-a4c0-4798-aacf-9f601f0f3502" (UID: "7d01dd09-a4c0-4798-aacf-9f601f0f3502"). InnerVolumeSpecName "kube-api-access-zft5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:50 crc kubenswrapper[4868]: I1003 13:09:50.926392 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw" (OuterVolumeSpecName: "kube-api-access-blzgw") pod "57f54345-335f-4bda-99a4-0e25bcb84c69" (UID: "57f54345-335f-4bda-99a4-0e25bcb84c69"). InnerVolumeSpecName "kube-api-access-blzgw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.024657 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blzgw\" (UniqueName: \"kubernetes.io/projected/57f54345-335f-4bda-99a4-0e25bcb84c69-kube-api-access-blzgw\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.024715 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zft5g\" (UniqueName: \"kubernetes.io/projected/7d01dd09-a4c0-4798-aacf-9f601f0f3502-kube-api-access-zft5g\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.091421 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-pmgh8"] Oct 03 13:09:51 crc kubenswrapper[4868]: E1003 13:09:51.091876 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d01dd09-a4c0-4798-aacf-9f601f0f3502" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.091895 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d01dd09-a4c0-4798-aacf-9f601f0f3502" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: E1003 13:09:51.091912 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.091919 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: E1003 13:09:51.091941 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f54345-335f-4bda-99a4-0e25bcb84c69" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.091951 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f54345-335f-4bda-99a4-0e25bcb84c69" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.092396 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.092421 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f54345-335f-4bda-99a4-0e25bcb84c69" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.092437 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d01dd09-a4c0-4798-aacf-9f601f0f3502" containerName="mariadb-database-create" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.093337 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: W1003 13:09:51.095737 4868 reflector.go:561] object-"openstack"/"glance-glance-dockercfg-nrct9": failed to list *v1.Secret: secrets "glance-glance-dockercfg-nrct9" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Oct 03 13:09:51 crc kubenswrapper[4868]: E1003 13:09:51.095793 4868 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"glance-glance-dockercfg-nrct9\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"glance-glance-dockercfg-nrct9\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 13:09:51 crc kubenswrapper[4868]: W1003 13:09:51.095845 4868 reflector.go:561] object-"openstack"/"glance-config-data": failed to list *v1.Secret: secrets "glance-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openstack": no relationship found between node 'crc' and this object Oct 03 13:09:51 crc kubenswrapper[4868]: E1003 13:09:51.095871 4868 reflector.go:158] "Unhandled Error" err="object-\"openstack\"/\"glance-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"glance-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openstack\": no relationship found between node 'crc' and this object" logger="UnhandledError" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.098686 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-6472-account-create-hg6f7"] Oct 03 13:09:51 crc kubenswrapper[4868]: W1003 13:09:51.103847 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8c65bc31_bfff_4894_855e_e411deea1819.slice/crio-8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39 WatchSource:0}: Error finding container 8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39: Status 404 returned error can't find the container with id 8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39 Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.105908 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-pmgh8"] Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.126106 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.126211 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.126310 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.126352 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg496\" (UniqueName: \"kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.227306 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.227390 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.227421 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg496\" (UniqueName: \"kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.227475 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.232727 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.248019 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg496\" (UniqueName: \"kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.285990 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-xkslz" event={"ID":"57f54345-335f-4bda-99a4-0e25bcb84c69","Type":"ContainerDied","Data":"f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf"} Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.286040 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-create-xkslz" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.286043 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8743c8bf379ec934e3d9ad2cbdec6900202de4aa49ae1f9749c93e5337999bf" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.288409 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-vw4nf" event={"ID":"113709cc-e6ba-4cb5-9eb4-7ddc62f39afe","Type":"ContainerDied","Data":"bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16"} Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.288542 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bb32dd11c44764137e3c5967d28f448a71b5d329006282213a69ae571f5a1d16" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.288422 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-vw4nf" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.289519 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6472-account-create-hg6f7" event={"ID":"8c65bc31-bfff-4894-855e-e411deea1819","Type":"ContainerStarted","Data":"8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39"} Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.291245 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-qkmwv" event={"ID":"7d01dd09-a4c0-4798-aacf-9f601f0f3502","Type":"ContainerDied","Data":"cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662"} Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.291264 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cb17576d0d5ed424039b8eddf51f91b85b463c4ec41a2a081997a69dac47a662" Oct 03 13:09:51 crc kubenswrapper[4868]: I1003 13:09:51.291300 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-qkmwv" Oct 03 13:09:52 crc kubenswrapper[4868]: E1003 13:09:52.227726 4868 secret.go:188] Couldn't get secret openstack/glance-config-data: failed to sync secret cache: timed out waiting for the condition Oct 03 13:09:52 crc kubenswrapper[4868]: E1003 13:09:52.227807 4868 secret.go:188] Couldn't get secret openstack/glance-config-data: failed to sync secret cache: timed out waiting for the condition Oct 03 13:09:52 crc kubenswrapper[4868]: E1003 13:09:52.228231 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data podName:05b10dd2-0b79-4dfe-9a42-52a392f3cbee nodeName:}" failed. No retries permitted until 2025-10-03 13:09:52.728202073 +0000 UTC m=+1188.938051139 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "db-sync-config-data" (UniqueName: "kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data") pod "glance-db-sync-pmgh8" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee") : failed to sync secret cache: timed out waiting for the condition Oct 03 13:09:52 crc kubenswrapper[4868]: E1003 13:09:52.228379 4868 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data podName:05b10dd2-0b79-4dfe-9a42-52a392f3cbee nodeName:}" failed. No retries permitted until 2025-10-03 13:09:52.728349817 +0000 UTC m=+1188.938199073 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data") pod "glance-db-sync-pmgh8" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee") : failed to sync secret cache: timed out waiting for the condition Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.307323 4868 generic.go:334] "Generic (PLEG): container finished" podID="8c65bc31-bfff-4894-855e-e411deea1819" containerID="055a546679aad786111f5d77321a9906eadfa8e3e8ca07f4746a96338176408d" exitCode=0 Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.307375 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6472-account-create-hg6f7" event={"ID":"8c65bc31-bfff-4894-855e-e411deea1819","Type":"ContainerDied","Data":"055a546679aad786111f5d77321a9906eadfa8e3e8ca07f4746a96338176408d"} Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.438451 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.643980 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nrct9" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.755183 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.755323 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.761906 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.762324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") pod \"glance-db-sync-pmgh8\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:52 crc kubenswrapper[4868]: I1003 13:09:52.925019 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-pmgh8" Oct 03 13:09:53 crc kubenswrapper[4868]: I1003 13:09:53.533177 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-pmgh8"] Oct 03 13:09:53 crc kubenswrapper[4868]: I1003 13:09:53.581673 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:53 crc kubenswrapper[4868]: I1003 13:09:53.774871 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45pxl\" (UniqueName: \"kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl\") pod \"8c65bc31-bfff-4894-855e-e411deea1819\" (UID: \"8c65bc31-bfff-4894-855e-e411deea1819\") " Oct 03 13:09:53 crc kubenswrapper[4868]: I1003 13:09:53.782285 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl" (OuterVolumeSpecName: "kube-api-access-45pxl") pod "8c65bc31-bfff-4894-855e-e411deea1819" (UID: "8c65bc31-bfff-4894-855e-e411deea1819"). InnerVolumeSpecName "kube-api-access-45pxl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:09:53 crc kubenswrapper[4868]: I1003 13:09:53.876662 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45pxl\" (UniqueName: \"kubernetes.io/projected/8c65bc31-bfff-4894-855e-e411deea1819-kube-api-access-45pxl\") on node \"crc\" DevicePath \"\"" Oct 03 13:09:54 crc kubenswrapper[4868]: I1003 13:09:54.327202 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-6472-account-create-hg6f7" event={"ID":"8c65bc31-bfff-4894-855e-e411deea1819","Type":"ContainerDied","Data":"8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39"} Oct 03 13:09:54 crc kubenswrapper[4868]: I1003 13:09:54.327667 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8164954c7da918979a4cdd36a7d3a7bfc512e57fa01a014906b321623d75ce39" Oct 03 13:09:54 crc kubenswrapper[4868]: I1003 13:09:54.327278 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-6472-account-create-hg6f7" Oct 03 13:09:54 crc kubenswrapper[4868]: I1003 13:09:54.329645 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pmgh8" event={"ID":"05b10dd2-0b79-4dfe-9a42-52a392f3cbee","Type":"ContainerStarted","Data":"fa1355b4e15fdf470a9101c121ab1a6be9a2a1c28c4dc6ba0406be98ecb5ab12"} Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.853708 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-2b7zv"] Oct 03 13:09:55 crc kubenswrapper[4868]: E1003 13:09:55.855106 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c65bc31-bfff-4894-855e-e411deea1819" containerName="mariadb-account-create" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.855192 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c65bc31-bfff-4894-855e-e411deea1819" containerName="mariadb-account-create" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.855466 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c65bc31-bfff-4894-855e-e411deea1819" containerName="mariadb-account-create" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.860399 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.865608 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.865657 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.867582 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.875419 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8g5zj" Oct 03 13:09:55 crc kubenswrapper[4868]: I1003 13:09:55.880301 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2b7zv"] Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.020159 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8tfc\" (UniqueName: \"kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.020243 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.020839 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.122533 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8tfc\" (UniqueName: \"kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.122915 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.123071 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.133404 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " 
pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.133405 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.143927 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8tfc\" (UniqueName: \"kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc\") pod \"keystone-db-sync-2b7zv\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") " pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.199481 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2b7zv" Oct 03 13:09:56 crc kubenswrapper[4868]: I1003 13:09:56.573653 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-2b7zv"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.417114 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2b7zv" event={"ID":"b4b896eb-56e1-4a1e-b78a-7004b5b21556","Type":"ContainerStarted","Data":"b59d9f95c8abfe5db6dd7baa10e74c495d7181f712d6145fffdb21008d929691"} Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.460650 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3fdd-account-create-sdqv9"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.468355 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3fdd-account-create-sdqv9" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.472967 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3fdd-account-create-sdqv9"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.480997 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.583834 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpctf\" (UniqueName: \"kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf\") pod \"cinder-3fdd-account-create-sdqv9\" (UID: \"1fb6f069-dc74-4ed1-9c25-848adbceb12e\") " pod="openstack/cinder-3fdd-account-create-sdqv9" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.659010 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-8f80-account-create-bqgxd"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.660803 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-8f80-account-create-bqgxd" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.674046 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.680923 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8f80-account-create-bqgxd"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.691588 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpctf\" (UniqueName: \"kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf\") pod \"cinder-3fdd-account-create-sdqv9\" (UID: \"1fb6f069-dc74-4ed1-9c25-848adbceb12e\") " pod="openstack/cinder-3fdd-account-create-sdqv9" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.691669 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9p68h\" (UniqueName: \"kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h\") pod \"barbican-8f80-account-create-bqgxd\" (UID: \"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55\") " pod="openstack/barbican-8f80-account-create-bqgxd" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.736126 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpctf\" (UniqueName: \"kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf\") pod \"cinder-3fdd-account-create-sdqv9\" (UID: \"1fb6f069-dc74-4ed1-9c25-848adbceb12e\") " pod="openstack/cinder-3fdd-account-create-sdqv9" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.793765 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9p68h\" (UniqueName: \"kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h\") pod \"barbican-8f80-account-create-bqgxd\" (UID: \"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55\") " pod="openstack/barbican-8f80-account-create-bqgxd" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.796542 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3fdd-account-create-sdqv9" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.814311 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9p68h\" (UniqueName: \"kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h\") pod \"barbican-8f80-account-create-bqgxd\" (UID: \"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55\") " pod="openstack/barbican-8f80-account-create-bqgxd" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.866954 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-f1da-account-create-pmqbc"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.868953 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f1da-account-create-pmqbc" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.872775 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.882240 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f1da-account-create-pmqbc"] Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.895137 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zkxrg\" (UniqueName: \"kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg\") pod \"neutron-f1da-account-create-pmqbc\" (UID: \"360ccb3f-569b-4679-bb72-149e646914f6\") " pod="openstack/neutron-f1da-account-create-pmqbc" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.981169 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8f80-account-create-bqgxd" Oct 03 13:09:57 crc kubenswrapper[4868]: I1003 13:09:57.997004 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zkxrg\" (UniqueName: \"kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg\") pod \"neutron-f1da-account-create-pmqbc\" (UID: \"360ccb3f-569b-4679-bb72-149e646914f6\") " pod="openstack/neutron-f1da-account-create-pmqbc" Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.027985 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zkxrg\" (UniqueName: \"kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg\") pod \"neutron-f1da-account-create-pmqbc\" (UID: \"360ccb3f-569b-4679-bb72-149e646914f6\") " pod="openstack/neutron-f1da-account-create-pmqbc" Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.234375 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-f1da-account-create-pmqbc" Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.256858 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3fdd-account-create-sdqv9"] Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.432638 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3fdd-account-create-sdqv9" event={"ID":"1fb6f069-dc74-4ed1-9c25-848adbceb12e","Type":"ContainerStarted","Data":"214744b003ec724d02dec82562c31f7fc0fc74142022cd4da5023421ab73202d"} Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.678232 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-8f80-account-create-bqgxd"] Oct 03 13:09:58 crc kubenswrapper[4868]: W1003 13:09:58.697740 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3c01fe0a_9b35_4ad3_976f_2e516e3b1d55.slice/crio-48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308 WatchSource:0}: Error finding container 48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308: Status 404 returned error can't find the container with id 48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308 Oct 03 13:09:58 crc kubenswrapper[4868]: I1003 13:09:58.820584 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-f1da-account-create-pmqbc"] Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.445563 4868 generic.go:334] "Generic (PLEG): container finished" podID="1fb6f069-dc74-4ed1-9c25-848adbceb12e" containerID="0d93addd038ce9b59f9774215bf42b2be184fecf69271bd3bb0de9a86be65256" exitCode=0 Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.445613 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3fdd-account-create-sdqv9" event={"ID":"1fb6f069-dc74-4ed1-9c25-848adbceb12e","Type":"ContainerDied","Data":"0d93addd038ce9b59f9774215bf42b2be184fecf69271bd3bb0de9a86be65256"} Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.449275 4868 generic.go:334] "Generic (PLEG): container finished" podID="360ccb3f-569b-4679-bb72-149e646914f6" containerID="fe6d20aff7678167494707934a72931747ede99fcf012e0301450a7c092d5b30" exitCode=0 Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.449399 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1da-account-create-pmqbc" event={"ID":"360ccb3f-569b-4679-bb72-149e646914f6","Type":"ContainerDied","Data":"fe6d20aff7678167494707934a72931747ede99fcf012e0301450a7c092d5b30"} Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.449437 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1da-account-create-pmqbc" event={"ID":"360ccb3f-569b-4679-bb72-149e646914f6","Type":"ContainerStarted","Data":"7e61bbe23a67fd208b69115dcd8bf66bc877f2f60d1d1beff054d55b5686df6a"} Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.455126 4868 generic.go:334] "Generic (PLEG): container finished" podID="3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" containerID="ff4f32c06c6aa9c7ef6502852abf3909d60d1c5c77bec76895b614e377ada302" exitCode=0 Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.455188 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8f80-account-create-bqgxd" event={"ID":"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55","Type":"ContainerDied","Data":"ff4f32c06c6aa9c7ef6502852abf3909d60d1c5c77bec76895b614e377ada302"} Oct 03 13:09:59 crc kubenswrapper[4868]: I1003 13:09:59.455257 4868 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8f80-account-create-bqgxd" event={"ID":"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55","Type":"ContainerStarted","Data":"48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308"}
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.145762 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.146610 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.146666 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.147475 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.147535 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c" gracePeriod=600
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.486994 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c" exitCode=0
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.487131 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c"}
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.487239 4868 scope.go:117] "RemoveContainer" containerID="9f15934458284405926073ad6c96722b605797c6a92fa9cf32b28fa47b81ce6a"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.491423 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-8f80-account-create-bqgxd" event={"ID":"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55","Type":"ContainerDied","Data":"48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308"}
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.491484 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48a8716f91d8ea1d86f72ff2d92f340ebc86951b41bd5522c86f4ea461ac5308"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.494107 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3fdd-account-create-sdqv9" event={"ID":"1fb6f069-dc74-4ed1-9c25-848adbceb12e","Type":"ContainerDied","Data":"214744b003ec724d02dec82562c31f7fc0fc74142022cd4da5023421ab73202d"}
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.494174 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="214744b003ec724d02dec82562c31f7fc0fc74142022cd4da5023421ab73202d"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.496868 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-f1da-account-create-pmqbc" event={"ID":"360ccb3f-569b-4679-bb72-149e646914f6","Type":"ContainerDied","Data":"7e61bbe23a67fd208b69115dcd8bf66bc877f2f60d1d1beff054d55b5686df6a"}
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.496925 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e61bbe23a67fd208b69115dcd8bf66bc877f2f60d1d1beff054d55b5686df6a"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.526962 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3fdd-account-create-sdqv9"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.536439 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8f80-account-create-bqgxd"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.556087 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1da-account-create-pmqbc"
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.604811 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkxrg\" (UniqueName: \"kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg\") pod \"360ccb3f-569b-4679-bb72-149e646914f6\" (UID: \"360ccb3f-569b-4679-bb72-149e646914f6\") "
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.604942 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpctf\" (UniqueName: \"kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf\") pod \"1fb6f069-dc74-4ed1-9c25-848adbceb12e\" (UID: \"1fb6f069-dc74-4ed1-9c25-848adbceb12e\") "
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.605247 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9p68h\" (UniqueName: \"kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h\") pod \"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55\" (UID: \"3c01fe0a-9b35-4ad3-976f-2e516e3b1d55\") "
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.613490 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg" (OuterVolumeSpecName: "kube-api-access-zkxrg") pod "360ccb3f-569b-4679-bb72-149e646914f6" (UID: "360ccb3f-569b-4679-bb72-149e646914f6"). InnerVolumeSpecName "kube-api-access-zkxrg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.615364 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf" (OuterVolumeSpecName: "kube-api-access-rpctf") pod "1fb6f069-dc74-4ed1-9c25-848adbceb12e" (UID: "1fb6f069-dc74-4ed1-9c25-848adbceb12e"). InnerVolumeSpecName "kube-api-access-rpctf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.618518 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h" (OuterVolumeSpecName: "kube-api-access-9p68h") pod "3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" (UID: "3c01fe0a-9b35-4ad3-976f-2e516e3b1d55"). InnerVolumeSpecName "kube-api-access-9p68h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.713667 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkxrg\" (UniqueName: \"kubernetes.io/projected/360ccb3f-569b-4679-bb72-149e646914f6-kube-api-access-zkxrg\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.713721 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpctf\" (UniqueName: \"kubernetes.io/projected/1fb6f069-dc74-4ed1-9c25-848adbceb12e-kube-api-access-rpctf\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:02 crc kubenswrapper[4868]: I1003 13:10:02.713732 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9p68h\" (UniqueName: \"kubernetes.io/projected/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55-kube-api-access-9p68h\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:03 crc kubenswrapper[4868]: I1003 13:10:03.505677 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-8f80-account-create-bqgxd"
Oct 03 13:10:03 crc kubenswrapper[4868]: I1003 13:10:03.505743 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-f1da-account-create-pmqbc"
Oct 03 13:10:03 crc kubenswrapper[4868]: I1003 13:10:03.506229 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3fdd-account-create-sdqv9"
Oct 03 13:10:07 crc kubenswrapper[4868]: I1003 13:10:07.545216 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:10:07 crc kubenswrapper[4868]: I1003 13:10:07.555209 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/bb0842a9-9947-4561-af16-154496b90622-etc-swift\") pod \"swift-storage-0\" (UID: \"bb0842a9-9947-4561-af16-154496b90622\") " pod="openstack/swift-storage-0"
Oct 03 13:10:07 crc kubenswrapper[4868]: I1003 13:10:07.750420 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Oct 03 13:10:11 crc kubenswrapper[4868]: E1003 13:10:11.308451 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified"
Oct 03 13:10:11 crc kubenswrapper[4868]: E1003 13:10:11.309589 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tg496,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-pmgh8_openstack(05b10dd2-0b79-4dfe-9a42-52a392f3cbee): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:10:11 crc kubenswrapper[4868]: E1003 13:10:11.310791 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-pmgh8" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee"
Oct 03 13:10:11 crc kubenswrapper[4868]: E1003 13:10:11.642505 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-pmgh8" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee"
Oct 03 13:10:12 crc kubenswrapper[4868]: I1003 13:10:12.723921 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Oct 03 13:10:12 crc kubenswrapper[4868]: W1003 13:10:12.729587 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbb0842a9_9947_4561_af16_154496b90622.slice/crio-aaa76691a466e152df74a0e437326f26682a69cd9b17b889a65d8512089979bf WatchSource:0}: Error finding container aaa76691a466e152df74a0e437326f26682a69cd9b17b889a65d8512089979bf: Status 404 returned error can't find the container with id aaa76691a466e152df74a0e437326f26682a69cd9b17b889a65d8512089979bf
Oct 03 13:10:13 crc kubenswrapper[4868]: I1003 13:10:13.639735 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"aaa76691a466e152df74a0e437326f26682a69cd9b17b889a65d8512089979bf"}
Oct 03 13:10:14 crc kubenswrapper[4868]: I1003 13:10:14.649645 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2b7zv" event={"ID":"b4b896eb-56e1-4a1e-b78a-7004b5b21556","Type":"ContainerStarted","Data":"4e120149ca6599d9843af7ee57b6762d19c87d6cec68112e5bb9be52d7e922d6"}
Oct 03 13:10:15 crc kubenswrapper[4868]: I1003 13:10:15.663447 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6"}
Oct 03 13:10:16 crc kubenswrapper[4868]: I1003 13:10:16.716334 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-2b7zv" podStartSLOduration=6.081908906 podStartE2EDuration="21.716307594s" podCreationTimestamp="2025-10-03 13:09:55 +0000 UTC" firstStartedPulling="2025-10-03 13:09:56.583807084 +0000 UTC m=+1192.793656150" lastFinishedPulling="2025-10-03 13:10:12.218205762 +0000 UTC m=+1208.428054838" observedRunningTime="2025-10-03 13:10:16.707685454 +0000 UTC m=+1212.917534520" watchObservedRunningTime="2025-10-03 13:10:16.716307594 +0000 UTC m=+1212.926156660"
Oct 03 13:10:19 crc kubenswrapper[4868]: I1003 13:10:19.702569 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"17a30061319f0ab9cef6f98740b134c20e30899239746f0a4ac6dca2c4123483"}
Oct 03 13:10:19 crc kubenswrapper[4868]: I1003 13:10:19.703396 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"31afbaa77508d020642b3ee10a95af22004aad6c16623e86fb9d7db21c7046f3"}
Oct 03 13:10:20 crc kubenswrapper[4868]: I1003 13:10:20.715023 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"eb19ed6c6fb8ecd6c3150506dad8504353f4c966ab0393b6b1df75b1b228c17e"}
Oct 03 13:10:20 crc kubenswrapper[4868]: I1003 13:10:20.715511 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"4ed84f6a1ea5ec4cd5890115de0411c4c25a49c30529cde9b562dc13054b52e3"}
Oct 03 13:10:21 crc kubenswrapper[4868]: I1003 13:10:21.728971 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"b49657498a6a3503aae846300e51aa1236447e340b97281cf12a9f482848cb9f"}
Oct 03 13:10:22 crc kubenswrapper[4868]: I1003 13:10:22.742809 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"4e610380ff1c995a2760dceda6428265d0b65158b1f40ad413a3db5837c2b260"}
Oct 03 13:10:22 crc kubenswrapper[4868]: I1003 13:10:22.743297 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"d4c0a753cf07f62f0a7f13f8784827a7abf6bf6735746e389ba3dde935a1ac76"}
Oct 03 13:10:22 crc kubenswrapper[4868]: I1003 13:10:22.743313 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"dcf0aebe8f47cd5af08f27ce0d754ecd4c0d94a3dd17a196b06e9984ceec088d"}
Oct 03 13:10:23 crc kubenswrapper[4868]: I1003 13:10:23.759197 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"a739d9895f374592f227e78525c9194cd8cbb8769ee958d907e7475788046319"}
Oct 03 13:10:23 crc kubenswrapper[4868]: I1003 13:10:23.761151 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"33b18454914d99033db43658bcbb58527d76f465332cf81c6f35e600a38a9709"}
Oct 03 13:10:23 crc kubenswrapper[4868]: I1003 13:10:23.763767 4868 generic.go:334] "Generic (PLEG): container finished" podID="b4b896eb-56e1-4a1e-b78a-7004b5b21556" containerID="4e120149ca6599d9843af7ee57b6762d19c87d6cec68112e5bb9be52d7e922d6" exitCode=0
Oct 03 13:10:23 crc kubenswrapper[4868]: I1003 13:10:23.763815 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2b7zv" event={"ID":"b4b896eb-56e1-4a1e-b78a-7004b5b21556","Type":"ContainerDied","Data":"4e120149ca6599d9843af7ee57b6762d19c87d6cec68112e5bb9be52d7e922d6"}
Oct 03 13:10:24 crc kubenswrapper[4868]: I1003 13:10:24.784757 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"889ae07042a30148b826678d2004b155eff19732d7e6c90e334186e85016da4c"}
Oct 03 13:10:24 crc kubenswrapper[4868]: I1003 13:10:24.784819 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"399d5a917f3ed8300200974973748db9e43edb50f25cd2dd007d6c29a7ff3e00"}
Oct 03 13:10:24 crc kubenswrapper[4868]: I1003 13:10:24.784834 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"848dcd66313c10658c04e9cd78642b384aab40cbbea5a66f9f3bf57c12340747"}
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.078949 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2b7zv"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.120390 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle\") pod \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") "
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.121140 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8tfc\" (UniqueName: \"kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc\") pod \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") "
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.121709 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data\") pod \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\" (UID: \"b4b896eb-56e1-4a1e-b78a-7004b5b21556\") "
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.142945 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc" (OuterVolumeSpecName: "kube-api-access-r8tfc") pod "b4b896eb-56e1-4a1e-b78a-7004b5b21556" (UID: "b4b896eb-56e1-4a1e-b78a-7004b5b21556"). InnerVolumeSpecName "kube-api-access-r8tfc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.153590 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4b896eb-56e1-4a1e-b78a-7004b5b21556" (UID: "b4b896eb-56e1-4a1e-b78a-7004b5b21556"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.184406 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data" (OuterVolumeSpecName: "config-data") pod "b4b896eb-56e1-4a1e-b78a-7004b5b21556" (UID: "b4b896eb-56e1-4a1e-b78a-7004b5b21556"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.225949 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8tfc\" (UniqueName: \"kubernetes.io/projected/b4b896eb-56e1-4a1e-b78a-7004b5b21556-kube-api-access-r8tfc\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.225984 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.225994 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4b896eb-56e1-4a1e-b78a-7004b5b21556-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.388747 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"]
Oct 03 13:10:25 crc kubenswrapper[4868]: E1003 13:10:25.390144 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390166 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: E1003 13:10:25.390202 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="360ccb3f-569b-4679-bb72-149e646914f6" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390210 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="360ccb3f-569b-4679-bb72-149e646914f6" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: E1003 13:10:25.390233 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1fb6f069-dc74-4ed1-9c25-848adbceb12e" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390240 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="1fb6f069-dc74-4ed1-9c25-848adbceb12e" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: E1003 13:10:25.390256 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4b896eb-56e1-4a1e-b78a-7004b5b21556" containerName="keystone-db-sync"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390284 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4b896eb-56e1-4a1e-b78a-7004b5b21556" containerName="keystone-db-sync"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390534 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="1fb6f069-dc74-4ed1-9c25-848adbceb12e" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390556 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4b896eb-56e1-4a1e-b78a-7004b5b21556" containerName="keystone-db-sync"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390606 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="360ccb3f-569b-4679-bb72-149e646914f6" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.390622 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" containerName="mariadb-account-create"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.394417 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.415233 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.429160 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-thx96\" (UniqueName: \"kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.429243 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.429277 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.429310 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.429360 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.431842 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-hrsb7"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.433149 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.450779 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hrsb7"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.532885 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.532962 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.532991 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533022 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533090 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btnxg\" (UniqueName: \"kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533130 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533163 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533232 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533305 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533333 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.533358 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-thx96\" (UniqueName: \"kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.534996 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.535762 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.535969 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.536746 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.574875 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-thx96\" (UniqueName: \"kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96\") pod \"dnsmasq-dns-f877ddd87-8dnl9\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.597900 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-mdrmw"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.599656 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.608396 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bgz86"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.608662 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.608836 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.667634 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.667880 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btnxg\" (UniqueName: \"kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.667985 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.668206 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.668343 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.668390 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.671836 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.678237 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.697204 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.725165 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mdrmw"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.726191 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btnxg\" (UniqueName: \"kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.726995 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.727634 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.730148 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.772639 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.774592 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.774697 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.775928 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.776213 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys\") pod \"keystone-bootstrap-hrsb7\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") " pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.783455 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.788009 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.799045 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.802505 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.802567 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tcwf\" (UniqueName: \"kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.818603 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.825748 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-kpq9r"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.826123 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.826298 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.833129 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.854755 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.904119 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-8krw5"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.905799 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.908845 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.908916 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.908956 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909030 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909087 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909108 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909133 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2rnhq\" (UniqueName: \"kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909153 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909171 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909210 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.909228 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tcwf\" (UniqueName: \"kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.914174 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8krw5"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.914341 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.926756 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.926892 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.927092 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-gm7mg"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.930206 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.942530 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.950112 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.950269 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.951983 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.952554 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.956564 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-v2vbf"]
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.954046 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.964133 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.964815 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tcwf\" (UniqueName: \"kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf\") pod \"cinder-db-sync-mdrmw\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " pod="openstack/cinder-db-sync-mdrmw"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.967717 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"491662fc7c500b5450e95d2783a884736c8e3e97f063b3a2433f6ac0e48bbda0"}
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.967838 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-v2vbf"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.971343 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.971666 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zhrmp"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.972571 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.975291 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-2b7zv" event={"ID":"b4b896eb-56e1-4a1e-b78a-7004b5b21556","Type":"ContainerDied","Data":"b59d9f95c8abfe5db6dd7baa10e74c495d7181f712d6145fffdb21008d929691"}
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.975352 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b59d9f95c8abfe5db6dd7baa10e74c495d7181f712d6145fffdb21008d929691"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.975429 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-2b7zv"
Oct 03 13:10:25 crc kubenswrapper[4868]: I1003 13:10:25.990448 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-v2vbf"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010715 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010786 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010816 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010839 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2rnhq\" (UniqueName: \"kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010857 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010897 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010914 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j87tf\" (UniqueName: \"kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010966 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.010991 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.011020 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.011041 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gzxfk\" (UniqueName: \"kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.011074 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.011097 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.011121 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.015706 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.016136 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.016193 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.023017 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.043309 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.070895 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2rnhq\" (UniqueName: \"kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq\") pod \"horizon-84c47f46bf-k89t4\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") " pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.074772 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-688f876687-qd7b8"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.076325 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688f876687-qd7b8"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.123099 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.123482 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.123750 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j87tf\" (UniqueName: \"kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.123950 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.124968 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gzxfk\" (UniqueName: \"kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.125150 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.125347 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtdxk\" (UniqueName: \"kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.125506 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.125700 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.125977 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.126130 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.126276 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.126466 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.126815 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.126902 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-688f876687-qd7b8"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.131135 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.132149 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.133627 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.137539 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.138262 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.148313 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.152674 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.164487 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j87tf\" (UniqueName: \"kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf\") pod \"neutron-db-sync-8krw5\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " pod="openstack/neutron-db-sync-8krw5"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.170921 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.171019 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.171921 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.175297 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gzxfk\" (UniqueName: \"kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk\") pod \"ceilometer-0\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " pod="openstack/ceilometer-0"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.193244 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.216273 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-7f6km"]
Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.217741 4868 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.220668 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.220679 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.220850 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-m5q5x" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228611 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228700 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228752 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228801 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228834 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228863 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228914 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrcrt\" (UniqueName: 
\"kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.228970 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.229002 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.229034 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtdxk\" (UniqueName: \"kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.229078 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2cfzp\" (UniqueName: \"kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.229096 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.245154 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.255715 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.259541 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8krw5" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.259791 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7f6km"] Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.270991 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-mdrmw" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.276878 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtdxk\" (UniqueName: \"kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk\") pod \"barbican-db-sync-v2vbf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.288726 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.301970 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331724 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331805 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331842 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331899 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331923 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331956 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zrcrt\" (UniqueName: \"kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.331987 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc 
kubenswrapper[4868]: I1003 13:10:26.332022 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332087 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2cfzp\" (UniqueName: \"kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332112 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332145 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332172 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332206 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vt6b\" (UniqueName: \"kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332240 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.332267 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.335856 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.335889 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" 
(UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.336940 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.338382 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.339396 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.341184 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.344521 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.350031 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.359562 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2cfzp\" (UniqueName: \"kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp\") pod \"dnsmasq-dns-68dcc9cf6f-8rp42\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.363739 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrcrt\" (UniqueName: \"kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt\") pod \"horizon-688f876687-qd7b8\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.434489 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle\") pod \"placement-db-sync-7f6km\" (UID: 
\"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.434675 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.434836 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.434883 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vt6b\" (UniqueName: \"kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.434917 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.435398 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.442200 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.449170 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.451668 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.473841 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vt6b\" (UniqueName: \"kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b\") pod \"placement-db-sync-7f6km\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:26 crc kubenswrapper[4868]: I1003 13:10:26.619512 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.632808 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.666927 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.667377 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7f6km" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.683852 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-hrsb7"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.908184 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:26.995428 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"bb0842a9-9947-4561-af16-154496b90622","Type":"ContainerStarted","Data":"f84658a74b80308adc19e9ec6d8dd8abdb19c41c52984d85dcfa5486f7d370a5"} Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.323545 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=74.822141504 podStartE2EDuration="1m25.323516423s" podCreationTimestamp="2025-10-03 13:09:02 +0000 UTC" firstStartedPulling="2025-10-03 13:10:12.731898958 +0000 UTC m=+1208.941748024" lastFinishedPulling="2025-10-03 13:10:23.233273877 +0000 UTC m=+1219.443122943" observedRunningTime="2025-10-03 13:10:27.05522486 +0000 UTC m=+1223.265073936" watchObservedRunningTime="2025-10-03 13:10:27.323516423 +0000 UTC m=+1223.533365489" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.329825 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.370853 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.373181 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.384033 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.392529 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.470667 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.470744 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.470772 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.470804 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.470955 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4x896\" (UniqueName: \"kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.471018 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.572728 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.572787 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " 
pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.572816 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.572913 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4x896\" (UniqueName: \"kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.572959 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.573097 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.574101 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.574154 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.574534 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.575016 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.575383 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: 
I1003 13:10:27.600015 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4x896\" (UniqueName: \"kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896\") pod \"dnsmasq-dns-58dd9ff6bc-p2pkh\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") " pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.694195 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.788139 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-688f876687-qd7b8"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.803622 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.807757 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.816128 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.822410 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.879499 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.879576 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm9vt\" (UniqueName: \"kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.879606 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.879777 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.879830 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982011 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs\") pod 
\"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982487 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm9vt\" (UniqueName: \"kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982528 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982621 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982660 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.982866 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.983758 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:27.984417 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:28.002299 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:28.006020 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm9vt\" (UniqueName: \"kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt\") pod \"horizon-66fb9b89c-l5dg5\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:28.140995 4868 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:29 crc kubenswrapper[4868]: W1003 13:10:28.654486 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod294bc659_a345_4082_82d9_41f60b1204f3.slice/crio-ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233 WatchSource:0}: Error finding container ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233: Status 404 returned error can't find the container with id ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233 Oct 03 13:10:29 crc kubenswrapper[4868]: W1003 13:10:28.656043 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda6f9d076_5dbd_4fc9_b231_28b392c9490a.slice/crio-a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be WatchSource:0}: Error finding container a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be: Status 404 returned error can't find the container with id a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be Oct 03 13:10:29 crc kubenswrapper[4868]: W1003 13:10:28.659423 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2d70666a_18ba_4a12_b501_2cd7cf55c780.slice/crio-f94065b1b44c991a259995d869b2220884c5f8e1a1eb55d08298019973ab29a3 WatchSource:0}: Error finding container f94065b1b44c991a259995d869b2220884c5f8e1a1eb55d08298019973ab29a3: Status 404 returned error can't find the container with id f94065b1b44c991a259995d869b2220884c5f8e1a1eb55d08298019973ab29a3 Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:29.044393 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84c47f46bf-k89t4" event={"ID":"a6f9d076-5dbd-4fc9-b231-28b392c9490a","Type":"ContainerStarted","Data":"a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be"} Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:29.045769 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9" event={"ID":"2d70666a-18ba-4a12-b501-2cd7cf55c780","Type":"ContainerStarted","Data":"f94065b1b44c991a259995d869b2220884c5f8e1a1eb55d08298019973ab29a3"} Oct 03 13:10:29 crc kubenswrapper[4868]: I1003 13:10:29.046651 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrsb7" event={"ID":"294bc659-a345-4082-82d9-41f60b1204f3","Type":"ContainerStarted","Data":"ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233"} Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.068746 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrsb7" event={"ID":"294bc659-a345-4082-82d9-41f60b1204f3","Type":"ContainerStarted","Data":"73ee913a098fd1ba42887e1d61de4a2a08c81c5a7dc9f05dd3a4472ec20c4820"} Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.077132 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pmgh8" event={"ID":"05b10dd2-0b79-4dfe-9a42-52a392f3cbee","Type":"ContainerStarted","Data":"247ce8911c25647e81a760133a366ebff6a0967cbf8fcffb3f0a9f94e6d770f7"} Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.093816 4868 generic.go:334] "Generic (PLEG): container finished" podID="2d70666a-18ba-4a12-b501-2cd7cf55c780" containerID="03ec0d85bcd0ec729298ef9886688bab7b297f379a8b8d73be76a27e92cc492b" exitCode=0 Oct 03 13:10:30 crc 
kubenswrapper[4868]: I1003 13:10:30.093874 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9" event={"ID":"2d70666a-18ba-4a12-b501-2cd7cf55c780","Type":"ContainerDied","Data":"03ec0d85bcd0ec729298ef9886688bab7b297f379a8b8d73be76a27e92cc492b"} Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.107560 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-hrsb7" podStartSLOduration=5.107534563 podStartE2EDuration="5.107534563s" podCreationTimestamp="2025-10-03 13:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:10:30.090453986 +0000 UTC m=+1226.300303082" watchObservedRunningTime="2025-10-03 13:10:30.107534563 +0000 UTC m=+1226.317383629" Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.173194 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-pmgh8" podStartSLOduration=7.336995131 podStartE2EDuration="39.173164438s" podCreationTimestamp="2025-10-03 13:09:51 +0000 UTC" firstStartedPulling="2025-10-03 13:09:53.539002401 +0000 UTC m=+1189.748851467" lastFinishedPulling="2025-10-03 13:10:25.375171708 +0000 UTC m=+1221.585020774" observedRunningTime="2025-10-03 13:10:30.134850453 +0000 UTC m=+1226.344699529" watchObservedRunningTime="2025-10-03 13:10:30.173164438 +0000 UTC m=+1226.383013514" Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.890319 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-688f876687-qd7b8"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.899435 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9" Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.914445 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.929348 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.937196 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.958751 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-8krw5"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.959591 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb\") pod \"2d70666a-18ba-4a12-b501-2cd7cf55c780\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.959734 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb\") pod \"2d70666a-18ba-4a12-b501-2cd7cf55c780\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.959828 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config\") pod \"2d70666a-18ba-4a12-b501-2cd7cf55c780\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 
13:10:30.959921 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc\") pod \"2d70666a-18ba-4a12-b501-2cd7cf55c780\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.960025 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-thx96\" (UniqueName: \"kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96\") pod \"2d70666a-18ba-4a12-b501-2cd7cf55c780\" (UID: \"2d70666a-18ba-4a12-b501-2cd7cf55c780\") " Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.966309 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-mdrmw"] Oct 03 13:10:30 crc kubenswrapper[4868]: W1003 13:10:30.977383 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podac4520ff_5ff9_4f31_bee1_2021e7d491c9.slice/crio-f6ce8675124f39900dbed424d9f224f997122103e5f462f16a35d0e691694e7f WatchSource:0}: Error finding container f6ce8675124f39900dbed424d9f224f997122103e5f462f16a35d0e691694e7f: Status 404 returned error can't find the container with id f6ce8675124f39900dbed424d9f224f997122103e5f462f16a35d0e691694e7f Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.985653 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-v2vbf"] Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.994203 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96" (OuterVolumeSpecName: "kube-api-access-thx96") pod "2d70666a-18ba-4a12-b501-2cd7cf55c780" (UID: "2d70666a-18ba-4a12-b501-2cd7cf55c780"). InnerVolumeSpecName "kube-api-access-thx96". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:10:30 crc kubenswrapper[4868]: I1003 13:10:30.994345 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-7f6km"] Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.005438 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.014197 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "2d70666a-18ba-4a12-b501-2cd7cf55c780" (UID: "2d70666a-18ba-4a12-b501-2cd7cf55c780"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.059005 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config" (OuterVolumeSpecName: "config") pod "2d70666a-18ba-4a12-b501-2cd7cf55c780" (UID: "2d70666a-18ba-4a12-b501-2cd7cf55c780"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.062085 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.062118 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-thx96\" (UniqueName: \"kubernetes.io/projected/2d70666a-18ba-4a12-b501-2cd7cf55c780-kube-api-access-thx96\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.062130 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.109669 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7f6km" event={"ID":"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b","Type":"ContainerStarted","Data":"0dffdc482b882789f7e688cbdd30874c9999f2de45c84e2e653fec17237b6fbe"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.113064 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688f876687-qd7b8" event={"ID":"ac4520ff-5ff9-4f31-bee1-2021e7d491c9","Type":"ContainerStarted","Data":"f6ce8675124f39900dbed424d9f224f997122103e5f462f16a35d0e691694e7f"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.114660 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8krw5" event={"ID":"29d1fa62-1c44-47f6-9c4c-c9023d4ef342","Type":"ContainerStarted","Data":"d779da3b645d00c413ac0d61a8895948835926a9134f41c0195170a5a1e68f66"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.115845 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66fb9b89c-l5dg5" event={"ID":"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb","Type":"ContainerStarted","Data":"c8e92995135e440bd432b3e0c1faec862f9d492fe04b515b87031767710f80c9"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.117553 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" event={"ID":"f899ff4a-7f0b-4888-992c-791325ed6746","Type":"ContainerStarted","Data":"a7ee712a4493b0e0ca7e093611eb6bb2eb509d2ab0147d3190d939ff9805aa96"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.118823 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "2d70666a-18ba-4a12-b501-2cd7cf55c780" (UID: "2d70666a-18ba-4a12-b501-2cd7cf55c780"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.120980 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerStarted","Data":"14963551e9ff9d3a96613b7a6507565ceb738a78ab14c7037d3963815e6f07a2"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.125132 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "2d70666a-18ba-4a12-b501-2cd7cf55c780" (UID: "2d70666a-18ba-4a12-b501-2cd7cf55c780"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.125482 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-v2vbf" event={"ID":"ebf1c326-1c1f-45c6-a9af-f758959b97cf","Type":"ContainerStarted","Data":"05b7d2061cde4d0b83d5f5f292be750e0eb5b2ca69565848914e79288c92ddd5"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.128162 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" event={"ID":"82d1b8b0-55c7-40a2-9225-9491b5eb0327","Type":"ContainerStarted","Data":"ac054919d1b07ef57b66fcf1108d826ffd070e842ef4263f294c4350a69e63a2"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.132251 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdrmw" event={"ID":"ff373899-8e15-4a17-a2dc-ae81859fc44e","Type":"ContainerStarted","Data":"1e0b5545fd0e16a90089b5f6b0b435e97faa5c26a8b51c05787184e921d8e86b"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.137134 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.140031 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f877ddd87-8dnl9" event={"ID":"2d70666a-18ba-4a12-b501-2cd7cf55c780","Type":"ContainerDied","Data":"f94065b1b44c991a259995d869b2220884c5f8e1a1eb55d08298019973ab29a3"} Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.140117 4868 scope.go:117] "RemoveContainer" containerID="03ec0d85bcd0ec729298ef9886688bab7b297f379a8b8d73be76a27e92cc492b" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.179178 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.179216 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/2d70666a-18ba-4a12-b501-2cd7cf55c780-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.226115 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"] Oct 03 13:10:31 crc kubenswrapper[4868]: I1003 13:10:31.233171 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-8dnl9"] Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.177456 4868 generic.go:334] "Generic (PLEG): container finished" podID="82d1b8b0-55c7-40a2-9225-9491b5eb0327" containerID="8eb23d73d04197a09b7a5fbc4e708220257cc09ec65114b1bdcdf06c1cde9908" exitCode=0 Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.177573 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" event={"ID":"82d1b8b0-55c7-40a2-9225-9491b5eb0327","Type":"ContainerDied","Data":"8eb23d73d04197a09b7a5fbc4e708220257cc09ec65114b1bdcdf06c1cde9908"} Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.181362 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8krw5" event={"ID":"29d1fa62-1c44-47f6-9c4c-c9023d4ef342","Type":"ContainerStarted","Data":"5fc7d3eab193e67830007fa455e9ab28deddf6084edd6b36056954ab14d726a5"} Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.185902 4868 generic.go:334] "Generic (PLEG): container finished" 
podID="f899ff4a-7f0b-4888-992c-791325ed6746" containerID="7da7ec9b8b1f4d6fefbc9658911bd533e8674d052260f768b12b52d5d04e1cd7" exitCode=0 Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.185979 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" event={"ID":"f899ff4a-7f0b-4888-992c-791325ed6746","Type":"ContainerDied","Data":"7da7ec9b8b1f4d6fefbc9658911bd533e8674d052260f768b12b52d5d04e1cd7"} Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.317241 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-8krw5" podStartSLOduration=7.317215736 podStartE2EDuration="7.317215736s" podCreationTimestamp="2025-10-03 13:10:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:10:32.259013559 +0000 UTC m=+1228.468862625" watchObservedRunningTime="2025-10-03 13:10:32.317215736 +0000 UTC m=+1228.527064802" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.558281 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2d70666a-18ba-4a12-b501-2cd7cf55c780" path="/var/lib/kubelet/pods/2d70666a-18ba-4a12-b501-2cd7cf55c780/volumes" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.671081 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.745720 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2cfzp\" (UniqueName: \"kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp\") pod \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.745799 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc\") pod \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.745838 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb\") pod \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.745985 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb\") pod \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.746039 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config\") pod \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\" (UID: \"82d1b8b0-55c7-40a2-9225-9491b5eb0327\") " Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.760151 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp" (OuterVolumeSpecName: "kube-api-access-2cfzp") pod "82d1b8b0-55c7-40a2-9225-9491b5eb0327" (UID: 
"82d1b8b0-55c7-40a2-9225-9491b5eb0327"). InnerVolumeSpecName "kube-api-access-2cfzp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.808915 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "82d1b8b0-55c7-40a2-9225-9491b5eb0327" (UID: "82d1b8b0-55c7-40a2-9225-9491b5eb0327"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.811907 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "82d1b8b0-55c7-40a2-9225-9491b5eb0327" (UID: "82d1b8b0-55c7-40a2-9225-9491b5eb0327"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.819958 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config" (OuterVolumeSpecName: "config") pod "82d1b8b0-55c7-40a2-9225-9491b5eb0327" (UID: "82d1b8b0-55c7-40a2-9225-9491b5eb0327"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.844879 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "82d1b8b0-55c7-40a2-9225-9491b5eb0327" (UID: "82d1b8b0-55c7-40a2-9225-9491b5eb0327"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.849075 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.849107 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.849118 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2cfzp\" (UniqueName: \"kubernetes.io/projected/82d1b8b0-55c7-40a2-9225-9491b5eb0327-kube-api-access-2cfzp\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.849131 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:32 crc kubenswrapper[4868]: I1003 13:10:32.849139 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/82d1b8b0-55c7-40a2-9225-9491b5eb0327-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.224349 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.224380 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-68dcc9cf6f-8rp42" event={"ID":"82d1b8b0-55c7-40a2-9225-9491b5eb0327","Type":"ContainerDied","Data":"ac054919d1b07ef57b66fcf1108d826ffd070e842ef4263f294c4350a69e63a2"} Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.225034 4868 scope.go:117] "RemoveContainer" containerID="8eb23d73d04197a09b7a5fbc4e708220257cc09ec65114b1bdcdf06c1cde9908" Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.231023 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" event={"ID":"f899ff4a-7f0b-4888-992c-791325ed6746","Type":"ContainerStarted","Data":"aa11c567e72198d8d64aaa240773f0a3029720c508cdfe2c57a6bca1dac3558b"} Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.232677 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.268272 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" podStartSLOduration=6.268234454 podStartE2EDuration="6.268234454s" podCreationTimestamp="2025-10-03 13:10:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:10:33.26136115 +0000 UTC m=+1229.471210236" watchObservedRunningTime="2025-10-03 13:10:33.268234454 +0000 UTC m=+1229.478083550" Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.376139 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"] Oct 03 13:10:33 crc kubenswrapper[4868]: I1003 13:10:33.396743 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-68dcc9cf6f-8rp42"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.452438 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.496679 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"] Oct 03 13:10:34 crc kubenswrapper[4868]: E1003 13:10:34.503426 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82d1b8b0-55c7-40a2-9225-9491b5eb0327" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.503486 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="82d1b8b0-55c7-40a2-9225-9491b5eb0327" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: E1003 13:10:34.503530 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2d70666a-18ba-4a12-b501-2cd7cf55c780" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.503538 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="2d70666a-18ba-4a12-b501-2cd7cf55c780" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.503915 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="82d1b8b0-55c7-40a2-9225-9491b5eb0327" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.503939 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="2d70666a-18ba-4a12-b501-2cd7cf55c780" containerName="init" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.505186 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.512614 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.515411 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.581523 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82d1b8b0-55c7-40a2-9225-9491b5eb0327" path="/var/lib/kubelet/pods/82d1b8b0-55c7-40a2-9225-9491b5eb0327/volumes" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.583008 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.606492 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6545c458bd-ttzj2"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.610853 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611353 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611539 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c58j5\" (UniqueName: \"kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611691 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611743 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611782 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.611822 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc 
kubenswrapper[4868]: I1003 13:10:34.611861 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.623757 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6545c458bd-ttzj2"] Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714194 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714266 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-logs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714331 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714384 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-tls-certs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714409 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c58j5\" (UniqueName: \"kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714473 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-combined-ca-bundle\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714515 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-scripts\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714537 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9bh6\" (UniqueName: \"kubernetes.io/projected/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-kube-api-access-x9bh6\") pod 
\"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714567 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-config-data\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714610 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714673 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714716 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-secret-key\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714740 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.714784 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.718657 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.720571 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.721085 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc 
kubenswrapper[4868]: I1003 13:10:34.723234 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.725914 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.725988 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.764167 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c58j5\" (UniqueName: \"kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5\") pod \"horizon-545874b5c8-jnl6d\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.816915 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-secret-key\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817064 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-logs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817138 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-tls-certs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817176 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-combined-ca-bundle\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817213 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-scripts\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817240 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9bh6\" (UniqueName: 
\"kubernetes.io/projected/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-kube-api-access-x9bh6\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.817269 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-config-data\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.818045 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-scripts\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.818942 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-config-data\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.819459 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-logs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.823771 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-tls-certs\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.825616 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-combined-ca-bundle\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.828041 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-horizon-secret-key\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.843839 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9bh6\" (UniqueName: \"kubernetes.io/projected/8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f-kube-api-access-x9bh6\") pod \"horizon-6545c458bd-ttzj2\" (UID: \"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f\") " pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.867068 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:10:34 crc kubenswrapper[4868]: I1003 13:10:34.937877 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:10:35 crc kubenswrapper[4868]: I1003 13:10:35.269519 4868 generic.go:334] "Generic (PLEG): container finished" podID="294bc659-a345-4082-82d9-41f60b1204f3" containerID="73ee913a098fd1ba42887e1d61de4a2a08c81c5a7dc9f05dd3a4472ec20c4820" exitCode=0 Oct 03 13:10:35 crc kubenswrapper[4868]: I1003 13:10:35.270757 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrsb7" event={"ID":"294bc659-a345-4082-82d9-41f60b1204f3","Type":"ContainerDied","Data":"73ee913a098fd1ba42887e1d61de4a2a08c81c5a7dc9f05dd3a4472ec20c4820"} Oct 03 13:10:35 crc kubenswrapper[4868]: I1003 13:10:35.389033 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"] Oct 03 13:10:35 crc kubenswrapper[4868]: I1003 13:10:35.578404 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6545c458bd-ttzj2"] Oct 03 13:10:37 crc kubenswrapper[4868]: I1003 13:10:37.696880 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" Oct 03 13:10:37 crc kubenswrapper[4868]: I1003 13:10:37.765649 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"] Oct 03 13:10:37 crc kubenswrapper[4868]: I1003 13:10:37.766553 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" containerID="cri-o://e642ea69b42011a7cc9d25ac76a279f4ee2f551567a078e3e121b409930a82ad" gracePeriod=10 Oct 03 13:10:37 crc kubenswrapper[4868]: I1003 13:10:37.878162 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Oct 03 13:10:38 crc kubenswrapper[4868]: I1003 13:10:38.304491 4868 generic.go:334] "Generic (PLEG): container finished" podID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerID="e642ea69b42011a7cc9d25ac76a279f4ee2f551567a078e3e121b409930a82ad" exitCode=0 Oct 03 13:10:38 crc kubenswrapper[4868]: I1003 13:10:38.304551 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7rdpg" event={"ID":"c47f90b9-aeac-456b-b26f-0cde9556f32f","Type":"ContainerDied","Data":"e642ea69b42011a7cc9d25ac76a279f4ee2f551567a078e3e121b409930a82ad"} Oct 03 13:10:42 crc kubenswrapper[4868]: I1003 13:10:42.877776 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Oct 03 13:10:45 crc kubenswrapper[4868]: E1003 13:10:45.737832 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Oct 03 13:10:45 crc kubenswrapper[4868]: E1003 13:10:45.738385 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
Oct 03 13:10:45 crc kubenswrapper[4868]: E1003 13:10:45.740949 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-84c47f46bf-k89t4" podUID="a6f9d076-5dbd-4fc9-b231-28b392c9490a"
Oct 03 13:10:46 crc kubenswrapper[4868]: I1003 13:10:46.376953 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerStarted","Data":"c512263a0aa15c65bbe4ccf2b608c270a37c26f6068b89d690a60f079f1cc65a"}
Oct 03 13:10:47 crc kubenswrapper[4868]: I1003 13:10:47.877757 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused"
Oct 03 13:10:47 crc kubenswrapper[4868]: I1003 13:10:47.877911 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-7rdpg"
Oct 03 13:10:48 crc kubenswrapper[4868]: W1003 13:10:48.051911 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8fbf3ab7_41a0_44dc_9fc1_e74ec76d395f.slice/crio-5043d7997dd3e40e7ccbdfeca7039d0c064b26a02cf7b51a4370d8336c477bea WatchSource:0}: Error finding container 5043d7997dd3e40e7ccbdfeca7039d0c064b26a02cf7b51a4370d8336c477bea: Status 404 returned error can't find the container with id 5043d7997dd3e40e7ccbdfeca7039d0c064b26a02cf7b51a4370d8336c477bea
Oct 03 13:10:48 crc kubenswrapper[4868]: I1003 13:10:48.395569 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6545c458bd-ttzj2" event={"ID":"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f","Type":"ContainerStarted","Data":"5043d7997dd3e40e7ccbdfeca7039d0c064b26a02cf7b51a4370d8336c477bea"}
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.051604 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-placement-api:current-podified"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.052073 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:placement-db-sync,Image:quay.io/podified-antelope-centos9/openstack-placement-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/placement,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:false,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:placement-dbsync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-4vt6b,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42482,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-db-sync-7f6km_openstack(cf1f9cd5-4be0-47d6-a72b-46c83aebb53b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.053236 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/placement-db-sync-7f6km" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.340093 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.340318 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5bh67h559h97h648h68dh68dh88h556h58fh654h668h686h7dh54dh6ch654h9dh8bh94h54bh675h97h68fh68ch596h5b4hfh77hddh59dh556q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gzxfk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(a4226f68-3dce-4cd2-a376-68cec266cd91): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.394988 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.395250 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68h685h55ch548h54ch67h695h545h5f4hf8h98h4h689h54dh85h686h6ch5bbh5d6hffhddh64dh6dhdh75h57dh556h685h66bhf8hbfh65dq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rm9vt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-66fb9b89c-l5dg5_openstack(56c0ed56-9662-474a-bf04-6f4a8d9bd9cb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.399736 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-66fb9b89c-l5dg5" podUID="56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.420425 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-84c47f46bf-k89t4" event={"ID":"a6f9d076-5dbd-4fc9-b231-28b392c9490a","Type":"ContainerDied","Data":"a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be"}
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.420489 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a302f1760071cc59bfbcd2744a2b527daaf3b7c835dc503d83de782d129fb3be"
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.425218 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-hrsb7" event={"ID":"294bc659-a345-4082-82d9-41f60b1204f3","Type":"ContainerDied","Data":"ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233"}
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.425274 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ebbf3767be37fa32a6fc760b518ba3622d64efa047dd7e812478721023039233"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.426765 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"placement-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-placement-api:current-podified\\\"\"" pod="openstack/placement-db-sync-7f6km" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.439460 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.439723 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfch69h5bchf7h569hc7h67h5f4h64h64bh8h564h5c7h64ch54ch67bh88h574h55chb4h68fh56chbdhc8h59chb7h57fh5ffhc7h6h54fhd9q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zrcrt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-688f876687-qd7b8_openstack(ac4520ff-5ff9-4f31-bee1-2021e7d491c9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Oct 03 13:10:50 crc kubenswrapper[4868]: E1003 13:10:50.445450 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-688f876687-qd7b8" podUID="ac4520ff-5ff9-4f31-bee1-2021e7d491c9"
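The ErrImagePull / ImagePullBackOff pairs above show the two stages of the same failure: the first sync attempt actually tries the pull and fails with the gRPC error, and subsequent syncs are short-circuited with "Back-off pulling image" until a per-image backoff window expires. A minimal sketch of that state machine (hypothetical structure, not the kubelet's image manager):

package main

import (
	"errors"
	"fmt"
	"time"
)

// pullBackoff sketches the per-image backoff that produces ImagePullBackOff:
// after a failed pull the image may not be retried until the window expires,
// and the window doubles (capped) on every consecutive failure.
type pullBackoff struct {
	next  map[string]time.Time
	delay map[string]time.Duration
}

func (b *pullBackoff) shouldPull(image string, now time.Time) error {
	if t, ok := b.next[image]; ok && now.Before(t) {
		return fmt.Errorf("ImagePullBackOff: back-off pulling image %q", image)
	}
	return nil
}

func (b *pullBackoff) recordFailure(image string, now time.Time, err error) error {
	d := b.delay[image]
	if d == 0 {
		d = 10 * time.Second
	} else if d < 5*time.Minute {
		d *= 2
	}
	b.delay[image], b.next[image] = d, now.Add(d)
	return fmt.Errorf("ErrImagePull: %w", err)
}

func main() {
	b := &pullBackoff{next: map[string]time.Time{}, delay: map[string]time.Duration{}}
	img := "quay.io/podified-antelope-centos9/openstack-horizon:current-podified"
	now := time.Now()
	fmt.Println(b.recordFailure(img, now, errors.New("rpc error: code = Canceled desc = copying config: context canceled")))
	fmt.Println(b.shouldPull(img, now.Add(time.Second))) // still inside the backoff window
}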
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.458557 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-hrsb7"
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.471168 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561079 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-btnxg\" (UniqueName: \"kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561160 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561189 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561260 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561320 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.561526 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts\") pod \"294bc659-a345-4082-82d9-41f60b1204f3\" (UID: \"294bc659-a345-4082-82d9-41f60b1204f3\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.568588 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.569478 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts" (OuterVolumeSpecName: "scripts") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.571473 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg" (OuterVolumeSpecName: "kube-api-access-btnxg") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "kube-api-access-btnxg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.589804 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.596270 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data" (OuterVolumeSpecName: "config-data") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.597714 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "294bc659-a345-4082-82d9-41f60b1204f3" (UID: "294bc659-a345-4082-82d9-41f60b1204f3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.665579 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data\") pod \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.666096 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2rnhq\" (UniqueName: \"kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq\") pod \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.666182 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs\") pod \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.666293 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts\") pod \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.666342 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key\") pod \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\" (UID: \"a6f9d076-5dbd-4fc9-b231-28b392c9490a\") "
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668327 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668350 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-btnxg\" (UniqueName: \"kubernetes.io/projected/294bc659-a345-4082-82d9-41f60b1204f3-kube-api-access-btnxg\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668363 4868 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668375 4868 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-credential-keys\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668386 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.668397 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/294bc659-a345-4082-82d9-41f60b1204f3-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.670209 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data" (OuterVolumeSpecName: "config-data") pod "a6f9d076-5dbd-4fc9-b231-28b392c9490a" (UID: "a6f9d076-5dbd-4fc9-b231-28b392c9490a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.671938 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs" (OuterVolumeSpecName: "logs") pod "a6f9d076-5dbd-4fc9-b231-28b392c9490a" (UID: "a6f9d076-5dbd-4fc9-b231-28b392c9490a"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.672540 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts" (OuterVolumeSpecName: "scripts") pod "a6f9d076-5dbd-4fc9-b231-28b392c9490a" (UID: "a6f9d076-5dbd-4fc9-b231-28b392c9490a"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.685317 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a6f9d076-5dbd-4fc9-b231-28b392c9490a" (UID: "a6f9d076-5dbd-4fc9-b231-28b392c9490a"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.697084 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq" (OuterVolumeSpecName: "kube-api-access-2rnhq") pod "a6f9d076-5dbd-4fc9-b231-28b392c9490a" (UID: "a6f9d076-5dbd-4fc9-b231-28b392c9490a"). InnerVolumeSpecName "kube-api-access-2rnhq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.770451 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2rnhq\" (UniqueName: \"kubernetes.io/projected/a6f9d076-5dbd-4fc9-b231-28b392c9490a-kube-api-access-2rnhq\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.770720 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a6f9d076-5dbd-4fc9-b231-28b392c9490a-logs\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.770788 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.770849 4868 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a6f9d076-5dbd-4fc9-b231-28b392c9490a-horizon-secret-key\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:50 crc kubenswrapper[4868]: I1003 13:10:50.770900 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a6f9d076-5dbd-4fc9-b231-28b392c9490a-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.432686 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-84c47f46bf-k89t4"
Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.432972 4868 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-bootstrap-hrsb7" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.502985 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"] Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.521075 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-84c47f46bf-k89t4"] Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.571610 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-hrsb7"] Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.579235 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-hrsb7"] Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.674989 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-txpvc"] Oct 03 13:10:51 crc kubenswrapper[4868]: E1003 13:10:51.675423 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="294bc659-a345-4082-82d9-41f60b1204f3" containerName="keystone-bootstrap" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.675468 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="294bc659-a345-4082-82d9-41f60b1204f3" containerName="keystone-bootstrap" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.675696 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="294bc659-a345-4082-82d9-41f60b1204f3" containerName="keystone-bootstrap" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.676336 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.680369 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.680546 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.680651 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8g5zj" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.680877 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.685392 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-txpvc"] Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788194 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788250 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788278 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts\") pod \"keystone-bootstrap-txpvc\" 
(UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788295 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788510 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.788702 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n77hw\" (UniqueName: \"kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890425 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890493 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n77hw\" (UniqueName: \"kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890567 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890599 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890626 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.890641 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " 
pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.897215 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.897533 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.898649 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.898947 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.906444 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.908691 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n77hw\" (UniqueName: \"kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw\") pod \"keystone-bootstrap-txpvc\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:51 crc kubenswrapper[4868]: I1003 13:10:51.996361 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:10:52 crc kubenswrapper[4868]: I1003 13:10:52.557167 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="294bc659-a345-4082-82d9-41f60b1204f3" path="/var/lib/kubelet/pods/294bc659-a345-4082-82d9-41f60b1204f3/volumes" Oct 03 13:10:52 crc kubenswrapper[4868]: I1003 13:10:52.558239 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6f9d076-5dbd-4fc9-b231-28b392c9490a" path="/var/lib/kubelet/pods/a6f9d076-5dbd-4fc9-b231-28b392c9490a/volumes" Oct 03 13:10:57 crc kubenswrapper[4868]: I1003 13:10:57.877949 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.933957 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7rdpg" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.938014 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.965218 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976029 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb\") pod \"c47f90b9-aeac-456b-b26f-0cde9556f32f\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976131 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm9vt\" (UniqueName: \"kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt\") pod \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976156 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs\") pod \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976184 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data\") pod \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976256 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc\") pod \"c47f90b9-aeac-456b-b26f-0cde9556f32f\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976275 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts\") pod \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976297 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cvhvf\" (UniqueName: \"kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf\") pod \"c47f90b9-aeac-456b-b26f-0cde9556f32f\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976381 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key\") pod \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\" (UID: \"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.976404 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb\") pod 
\"c47f90b9-aeac-456b-b26f-0cde9556f32f\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977041 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config\") pod \"c47f90b9-aeac-456b-b26f-0cde9556f32f\" (UID: \"c47f90b9-aeac-456b-b26f-0cde9556f32f\") " Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977161 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts" (OuterVolumeSpecName: "scripts") pod "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" (UID: "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977281 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data" (OuterVolumeSpecName: "config-data") pod "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" (UID: "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977545 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977563 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.977932 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs" (OuterVolumeSpecName: "logs") pod "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" (UID: "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.983648 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt" (OuterVolumeSpecName: "kube-api-access-rm9vt") pod "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" (UID: "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"). InnerVolumeSpecName "kube-api-access-rm9vt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:10:59 crc kubenswrapper[4868]: I1003 13:10:59.990937 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf" (OuterVolumeSpecName: "kube-api-access-cvhvf") pod "c47f90b9-aeac-456b-b26f-0cde9556f32f" (UID: "c47f90b9-aeac-456b-b26f-0cde9556f32f"). InnerVolumeSpecName "kube-api-access-cvhvf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.005117 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" (UID: "56c0ed56-9662-474a-bf04-6f4a8d9bd9cb"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.058008 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "c47f90b9-aeac-456b-b26f-0cde9556f32f" (UID: "c47f90b9-aeac-456b-b26f-0cde9556f32f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.068321 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config" (OuterVolumeSpecName: "config") pod "c47f90b9-aeac-456b-b26f-0cde9556f32f" (UID: "c47f90b9-aeac-456b-b26f-0cde9556f32f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.074772 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "c47f90b9-aeac-456b-b26f-0cde9556f32f" (UID: "c47f90b9-aeac-456b-b26f-0cde9556f32f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.079310 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key\") pod \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.079398 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data\") pod \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.079465 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts\") pod \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.079533 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs\") pod \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.079635 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrcrt\" (UniqueName: \"kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt\") pod \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\" (UID: \"ac4520ff-5ff9-4f31-bee1-2021e7d491c9\") " Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080029 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts" (OuterVolumeSpecName: "scripts") pod "ac4520ff-5ff9-4f31-bee1-2021e7d491c9" (UID: "ac4520ff-5ff9-4f31-bee1-2021e7d491c9"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080484 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs" (OuterVolumeSpecName: "logs") pod "ac4520ff-5ff9-4f31-bee1-2021e7d491c9" (UID: "ac4520ff-5ff9-4f31-bee1-2021e7d491c9"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080613 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data" (OuterVolumeSpecName: "config-data") pod "ac4520ff-5ff9-4f31-bee1-2021e7d491c9" (UID: "ac4520ff-5ff9-4f31-bee1-2021e7d491c9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080467 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080672 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080703 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080714 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm9vt\" (UniqueName: \"kubernetes.io/projected/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-kube-api-access-rm9vt\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080723 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cvhvf\" (UniqueName: \"kubernetes.io/projected/c47f90b9-aeac-456b-b26f-0cde9556f32f-kube-api-access-cvhvf\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080735 4868 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080743 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.080752 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.083126 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "c47f90b9-aeac-456b-b26f-0cde9556f32f" (UID: "c47f90b9-aeac-456b-b26f-0cde9556f32f"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.083704 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "ac4520ff-5ff9-4f31-bee1-2021e7d491c9" (UID: "ac4520ff-5ff9-4f31-bee1-2021e7d491c9"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.084737 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt" (OuterVolumeSpecName: "kube-api-access-zrcrt") pod "ac4520ff-5ff9-4f31-bee1-2021e7d491c9" (UID: "ac4520ff-5ff9-4f31-bee1-2021e7d491c9"). InnerVolumeSpecName "kube-api-access-zrcrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.132327 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-txpvc"] Oct 03 13:11:00 crc kubenswrapper[4868]: W1003 13:11:00.137444 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podbcabf93f_09c3_4dd6_8ba9_2556afc0c15b.slice/crio-02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd WatchSource:0}: Error finding container 02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd: Status 404 returned error can't find the container with id 02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.182832 4868 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.182869 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.182878 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.182889 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrcrt\" (UniqueName: \"kubernetes.io/projected/ac4520ff-5ff9-4f31-bee1-2021e7d491c9-kube-api-access-zrcrt\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.182901 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/c47f90b9-aeac-456b-b26f-0cde9556f32f-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.537208 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-688f876687-qd7b8" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.537237 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-688f876687-qd7b8" event={"ID":"ac4520ff-5ff9-4f31-bee1-2021e7d491c9","Type":"ContainerDied","Data":"f6ce8675124f39900dbed424d9f224f997122103e5f462f16a35d0e691694e7f"} Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.541228 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-7rdpg" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.541220 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-7rdpg" event={"ID":"c47f90b9-aeac-456b-b26f-0cde9556f32f","Type":"ContainerDied","Data":"bd4a26a43160326e6224322ef4f68c81f7b9224d43eeea946a559d5648ace8e9"} Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.541389 4868 scope.go:117] "RemoveContainer" containerID="e642ea69b42011a7cc9d25ac76a279f4ee2f551567a078e3e121b409930a82ad" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.545003 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-66fb9b89c-l5dg5" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.557962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txpvc" event={"ID":"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b","Type":"ContainerStarted","Data":"02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd"} Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.558002 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-66fb9b89c-l5dg5" event={"ID":"56c0ed56-9662-474a-bf04-6f4a8d9bd9cb","Type":"ContainerDied","Data":"c8e92995135e440bd432b3e0c1faec862f9d492fe04b515b87031767710f80c9"} Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.583446 4868 scope.go:117] "RemoveContainer" containerID="cc5780bd24d45fdd5665210c06d23ada6a083cd8ae063f74bb50f3248179b724" Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.583834 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"] Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.603959 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-7rdpg"] Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.656112 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.667419 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-66fb9b89c-l5dg5"] Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.682747 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-688f876687-qd7b8"] Oct 03 13:11:00 crc kubenswrapper[4868]: I1003 13:11:00.720859 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-688f876687-qd7b8"] Oct 03 13:11:00 crc kubenswrapper[4868]: E1003 13:11:00.749951 4868 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Oct 03 13:11:00 crc kubenswrapper[4868]: E1003 13:11:00.750265 4868 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9tcwf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-mdrmw_openstack(ff373899-8e15-4a17-a2dc-ae81859fc44e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Oct 03 13:11:00 crc kubenswrapper[4868]: E1003 13:11:00.752141 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-mdrmw" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.557497 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerStarted","Data":"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.561575 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerStarted","Data":"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.561624 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" 
event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerStarted","Data":"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.563044 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-v2vbf" event={"ID":"ebf1c326-1c1f-45c6-a9af-f758959b97cf","Type":"ContainerStarted","Data":"4003895d18873ec4c7e75627e60e8bbd003495f765e71d869bbf905e972a8261"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.565978 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6545c458bd-ttzj2" event={"ID":"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f","Type":"ContainerStarted","Data":"696470680e91edcf55d5dc8d7d995dbabac3e23aa564726e66ec7ad90ceed0b4"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.566010 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6545c458bd-ttzj2" event={"ID":"8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f","Type":"ContainerStarted","Data":"a6846cabb0e190dd32a8971619f01ef44584010f7e950bb4b34c7f3000ef3334"} Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.570500 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txpvc" event={"ID":"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b","Type":"ContainerStarted","Data":"bd8d2315c652f0a51793e098f994c8c5794adbc6676fa5afcf3d48151a0064c9"} Oct 03 13:11:01 crc kubenswrapper[4868]: E1003 13:11:01.571445 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-mdrmw" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.618703 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-545874b5c8-jnl6d" podStartSLOduration=12.302525494 podStartE2EDuration="27.618680782s" podCreationTimestamp="2025-10-03 13:10:34 +0000 UTC" firstStartedPulling="2025-10-03 13:10:45.692820261 +0000 UTC m=+1241.902669327" lastFinishedPulling="2025-10-03 13:11:01.008975549 +0000 UTC m=+1257.218824615" observedRunningTime="2025-10-03 13:11:01.595530412 +0000 UTC m=+1257.805379478" watchObservedRunningTime="2025-10-03 13:11:01.618680782 +0000 UTC m=+1257.828529868" Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.623161 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-6545c458bd-ttzj2" podStartSLOduration=14.666640956 podStartE2EDuration="27.623137491s" podCreationTimestamp="2025-10-03 13:10:34 +0000 UTC" firstStartedPulling="2025-10-03 13:10:48.055941916 +0000 UTC m=+1244.265790982" lastFinishedPulling="2025-10-03 13:11:01.012438451 +0000 UTC m=+1257.222287517" observedRunningTime="2025-10-03 13:11:01.61522898 +0000 UTC m=+1257.825078056" watchObservedRunningTime="2025-10-03 13:11:01.623137491 +0000 UTC m=+1257.832986557" Oct 03 13:11:01 crc kubenswrapper[4868]: I1003 13:11:01.652420 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-txpvc" podStartSLOduration=10.652396776 podStartE2EDuration="10.652396776s" podCreationTimestamp="2025-10-03 13:10:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:01.64321168 +0000 UTC m=+1257.853060766" watchObservedRunningTime="2025-10-03 
13:11:01.652396776 +0000 UTC m=+1257.862245852" Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.564069 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56c0ed56-9662-474a-bf04-6f4a8d9bd9cb" path="/var/lib/kubelet/pods/56c0ed56-9662-474a-bf04-6f4a8d9bd9cb/volumes" Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.565449 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ac4520ff-5ff9-4f31-bee1-2021e7d491c9" path="/var/lib/kubelet/pods/ac4520ff-5ff9-4f31-bee1-2021e7d491c9/volumes" Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.565876 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" path="/var/lib/kubelet/pods/c47f90b9-aeac-456b-b26f-0cde9556f32f/volumes" Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.586108 4868 generic.go:334] "Generic (PLEG): container finished" podID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee" containerID="247ce8911c25647e81a760133a366ebff6a0967cbf8fcffb3f0a9f94e6d770f7" exitCode=0 Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.587273 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pmgh8" event={"ID":"05b10dd2-0b79-4dfe-9a42-52a392f3cbee","Type":"ContainerDied","Data":"247ce8911c25647e81a760133a366ebff6a0967cbf8fcffb3f0a9f94e6d770f7"} Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.614978 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-v2vbf" podStartSLOduration=8.932485073 podStartE2EDuration="37.614957198s" podCreationTimestamp="2025-10-03 13:10:25 +0000 UTC" firstStartedPulling="2025-10-03 13:10:30.998994309 +0000 UTC m=+1227.208843375" lastFinishedPulling="2025-10-03 13:10:59.681466434 +0000 UTC m=+1255.891315500" observedRunningTime="2025-10-03 13:11:01.688766801 +0000 UTC m=+1257.898615867" watchObservedRunningTime="2025-10-03 13:11:02.614957198 +0000 UTC m=+1258.824806264" Oct 03 13:11:02 crc kubenswrapper[4868]: I1003 13:11:02.879818 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-7rdpg" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Oct 03 13:11:04 crc kubenswrapper[4868]: I1003 13:11:04.867796 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:11:04 crc kubenswrapper[4868]: I1003 13:11:04.868401 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:11:04 crc kubenswrapper[4868]: I1003 13:11:04.938129 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:11:04 crc kubenswrapper[4868]: I1003 13:11:04.938197 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-6545c458bd-ttzj2" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.174929 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-pmgh8" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.260720 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") pod \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.260922 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") pod \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.261108 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg496\" (UniqueName: \"kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496\") pod \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.261380 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle\") pod \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\" (UID: \"05b10dd2-0b79-4dfe-9a42-52a392f3cbee\") " Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.267324 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496" (OuterVolumeSpecName: "kube-api-access-tg496") pod "05b10dd2-0b79-4dfe-9a42-52a392f3cbee" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee"). InnerVolumeSpecName "kube-api-access-tg496". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.278264 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "05b10dd2-0b79-4dfe-9a42-52a392f3cbee" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.288904 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "05b10dd2-0b79-4dfe-9a42-52a392f3cbee" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.316002 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data" (OuterVolumeSpecName: "config-data") pod "05b10dd2-0b79-4dfe-9a42-52a392f3cbee" (UID: "05b10dd2-0b79-4dfe-9a42-52a392f3cbee"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.364766 4868 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.364826 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.364840 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg496\" (UniqueName: \"kubernetes.io/projected/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-kube-api-access-tg496\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.364855 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/05b10dd2-0b79-4dfe-9a42-52a392f3cbee-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.653537 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-pmgh8" event={"ID":"05b10dd2-0b79-4dfe-9a42-52a392f3cbee","Type":"ContainerDied","Data":"fa1355b4e15fdf470a9101c121ab1a6be9a2a1c28c4dc6ba0406be98ecb5ab12"} Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.653585 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa1355b4e15fdf470a9101c121ab1a6be9a2a1c28c4dc6ba0406be98ecb5ab12" Oct 03 13:11:08 crc kubenswrapper[4868]: I1003 13:11:08.653659 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-pmgh8" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.638982 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:09 crc kubenswrapper[4868]: E1003 13:11:09.639825 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee" containerName="glance-db-sync" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.639845 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee" containerName="glance-db-sync" Oct 03 13:11:09 crc kubenswrapper[4868]: E1003 13:11:09.639877 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.639886 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" Oct 03 13:11:09 crc kubenswrapper[4868]: E1003 13:11:09.639927 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="init" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.639937 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="init" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.640191 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c47f90b9-aeac-456b-b26f-0cde9556f32f" containerName="dnsmasq-dns" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.640206 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee" containerName="glance-db-sync" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.641500 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.682652 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.803690 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.803774 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.803852 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.803909 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.803938 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jsd9\" (UniqueName: \"kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.804021 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.906411 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.906521 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.906623 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.906793 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.906840 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jsd9\" (UniqueName: \"kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.907002 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.908555 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.909173 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.909427 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.918143 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.919117 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.940961 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jsd9\" (UniqueName: 
\"kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9\") pod \"dnsmasq-dns-785d8bcb8c-4tw2b\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:09 crc kubenswrapper[4868]: I1003 13:11:09.991004 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.684206 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.686256 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.688747 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.692161 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nrct9" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.693129 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.703939 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.737221 4868 generic.go:334] "Generic (PLEG): container finished" podID="bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" containerID="bd8d2315c652f0a51793e098f994c8c5794adbc6676fa5afcf3d48151a0064c9" exitCode=0 Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.737287 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txpvc" event={"ID":"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b","Type":"ContainerDied","Data":"bd8d2315c652f0a51793e098f994c8c5794adbc6676fa5afcf3d48151a0064c9"} Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.807971 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.810446 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.814746 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.825427 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846185 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846229 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846255 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846275 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846300 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846319 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wfrl\" (UniqueName: \"kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.846346 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948539 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rz5mq\" (UniqueName: \"kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq\") pod \"glance-default-internal-api-0\" 
(UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948614 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948697 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948728 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948749 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948775 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948811 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948861 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948900 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948925 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948946 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948965 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wfrl\" (UniqueName: \"kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.948998 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.949026 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.950352 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.950442 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.955415 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.957921 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.958615 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc 
kubenswrapper[4868]: I1003 13:11:10.969524 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.979034 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wfrl\" (UniqueName: \"kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:10 crc kubenswrapper[4868]: I1003 13:11:10.999155 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.049746 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050524 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050602 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050659 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050702 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050703 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.050969 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.051168 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rz5mq\" (UniqueName: \"kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.051204 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.051271 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.051366 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.058206 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.060328 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.060679 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.073247 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rz5mq\" (UniqueName: \"kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.082883 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.144671 4868 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.190321 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:11 crc kubenswrapper[4868]: I1003 13:11:11.762973 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" event={"ID":"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346","Type":"ContainerStarted","Data":"d78447735695708130b0bf57bbc075de47b277990d09acde87e5778349607c15"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.377361 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.394988 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.462088 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.512985 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.513038 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.513186 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.513356 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.513392 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n77hw\" (UniqueName: \"kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.513414 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys\") pod \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\" (UID: \"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b\") " Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.524294 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts" (OuterVolumeSpecName: "scripts") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.526621 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw" (OuterVolumeSpecName: "kube-api-access-n77hw") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "kube-api-access-n77hw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.526800 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.527168 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.554521 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.560164 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data" (OuterVolumeSpecName: "config-data") pod "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" (UID: "bcabf93f-09c3-4dd6-8ba9-2556afc0c15b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616005 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616034 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616043 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n77hw\" (UniqueName: \"kubernetes.io/projected/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-kube-api-access-n77hw\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616073 4868 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-fernet-keys\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616087 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.616099 4868 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b-credential-keys\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.787884 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerStarted","Data":"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.790215 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7f6km" event={"ID":"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b","Type":"ContainerStarted","Data":"8163c9a92feaddd362cb2dfff8aa650a81a4d993a162ff1d953fdcb7b94f1995"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.802551 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-txpvc" event={"ID":"bcabf93f-09c3-4dd6-8ba9-2556afc0c15b","Type":"ContainerDied","Data":"02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.802613 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="02e078d60459b98f4162e910b965352940e612d91771acbf989d2eaf655e16dd" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.802723 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-txpvc" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.816708 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerStarted","Data":"776325e002e150bb2b00fe246e124a1f379309ff480f78e9c36542a381ecda39"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.831933 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-7f6km" podStartSLOduration=7.324978327 podStartE2EDuration="47.831904499s" podCreationTimestamp="2025-10-03 13:10:25 +0000 UTC" firstStartedPulling="2025-10-03 13:10:31.064356176 +0000 UTC m=+1227.274205232" lastFinishedPulling="2025-10-03 13:11:11.571282328 +0000 UTC m=+1267.781131404" observedRunningTime="2025-10-03 13:11:12.81704042 +0000 UTC m=+1269.026889486" watchObservedRunningTime="2025-10-03 13:11:12.831904499 +0000 UTC m=+1269.041753585" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.835713 4868 generic.go:334] "Generic (PLEG): container finished" podID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerID="da49d5b893f722525be0b8ab0c35bf94be1fb05b19232feba526bba6347ebc07" exitCode=0 Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.835821 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" event={"ID":"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346","Type":"ContainerDied","Data":"da49d5b893f722525be0b8ab0c35bf94be1fb05b19232feba526bba6347ebc07"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.874560 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerStarted","Data":"6993d4483e603ded268bb7c8333d355a832998a37416f139f89743e08d920526"} Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.902504 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.979177 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-64cb6d74d6-hlvzr"] Oct 03 13:11:12 crc kubenswrapper[4868]: E1003 13:11:12.979678 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" containerName="keystone-bootstrap" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.979691 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" containerName="keystone-bootstrap" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.979895 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" containerName="keystone-bootstrap" Oct 03 13:11:12 crc kubenswrapper[4868]: I1003 13:11:12.992075 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.010807 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.011908 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.012211 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.012356 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.012531 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.012644 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-8g5zj" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.028881 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-config-data\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.028951 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-scripts\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.028981 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-public-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.029013 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-internal-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.029036 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-credential-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.029119 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-fernet-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.029190 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rcvc\" (UniqueName: \"kubernetes.io/projected/d4f6f758-2853-4aa4-b040-f1c9501105a6-kube-api-access-9rcvc\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.029222 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-combined-ca-bundle\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.044478 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-64cb6d74d6-hlvzr"] Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.073117 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.130667 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rcvc\" (UniqueName: \"kubernetes.io/projected/d4f6f758-2853-4aa4-b040-f1c9501105a6-kube-api-access-9rcvc\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.130732 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-combined-ca-bundle\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.131830 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-config-data\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.133126 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-scripts\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.133389 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-public-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.133627 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-internal-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.133656 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: 
\"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-credential-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.133687 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-fernet-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.141635 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-public-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.142247 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-combined-ca-bundle\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.142732 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-config-data\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.145214 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-fernet-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.149647 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-scripts\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.149664 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-internal-tls-certs\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.154472 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/d4f6f758-2853-4aa4-b040-f1c9501105a6-credential-keys\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.161574 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rcvc\" (UniqueName: \"kubernetes.io/projected/d4f6f758-2853-4aa4-b040-f1c9501105a6-kube-api-access-9rcvc\") pod \"keystone-64cb6d74d6-hlvzr\" (UID: \"d4f6f758-2853-4aa4-b040-f1c9501105a6\") " 
pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.374693 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.870707 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-64cb6d74d6-hlvzr"] Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.895771 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerStarted","Data":"4dca05bfb01ca7f9ecd049a1d2e8053264b5d698a62996dd6d8bb057e041c5be"} Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.899468 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerStarted","Data":"bac965ccae4e292caebcb6b71f38dfbe49dc1e6dd63c9a7dfc21270526d2215b"} Oct 03 13:11:13 crc kubenswrapper[4868]: W1003 13:11:13.900319 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd4f6f758_2853_4aa4_b040_f1c9501105a6.slice/crio-d80dc91bc1b5eb52042b40f8e4779676b2d348bdc8b82f81dfacc8c260261c18 WatchSource:0}: Error finding container d80dc91bc1b5eb52042b40f8e4779676b2d348bdc8b82f81dfacc8c260261c18: Status 404 returned error can't find the container with id d80dc91bc1b5eb52042b40f8e4779676b2d348bdc8b82f81dfacc8c260261c18 Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.903242 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" event={"ID":"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346","Type":"ContainerStarted","Data":"724bf2aa921f2fd72c3885e1f2e95d89d05fd2af8a4ff321873e12f3396cd662"} Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.903485 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:13 crc kubenswrapper[4868]: I1003 13:11:13.932170 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" podStartSLOduration=4.932146262 podStartE2EDuration="4.932146262s" podCreationTimestamp="2025-10-03 13:11:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:13.923666075 +0000 UTC m=+1270.133515161" watchObservedRunningTime="2025-10-03 13:11:13.932146262 +0000 UTC m=+1270.141995328" Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.873900 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.914254 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerStarted","Data":"dc90cef984d5095cf81757c7f645ac2535fdda2dade8550fe0ced4068f189f92"} Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.914699 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" 
containerName="glance-log" containerID="cri-o://bac965ccae4e292caebcb6b71f38dfbe49dc1e6dd63c9a7dfc21270526d2215b" gracePeriod=30 Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.915273 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-httpd" containerID="cri-o://dc90cef984d5095cf81757c7f645ac2535fdda2dade8550fe0ced4068f189f92" gracePeriod=30 Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.921710 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-64cb6d74d6-hlvzr" event={"ID":"d4f6f758-2853-4aa4-b040-f1c9501105a6","Type":"ContainerStarted","Data":"30f05821eba6f8e61d2f35a9b2a8c145fef91524d70f9b065ca20f636788cdac"} Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.921775 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-64cb6d74d6-hlvzr" event={"ID":"d4f6f758-2853-4aa4-b040-f1c9501105a6","Type":"ContainerStarted","Data":"d80dc91bc1b5eb52042b40f8e4779676b2d348bdc8b82f81dfacc8c260261c18"} Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.923202 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-64cb6d74d6-hlvzr" Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.935714 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerStarted","Data":"4e2db15fe2e461709585ac14a5369cb8ed80b69ae003027bf4f4fdc62e7fa0e9"} Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.939844 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-log" containerID="cri-o://4dca05bfb01ca7f9ecd049a1d2e8053264b5d698a62996dd6d8bb057e041c5be" gracePeriod=30 Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.940494 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-httpd" containerID="cri-o://4e2db15fe2e461709585ac14a5369cb8ed80b69ae003027bf4f4fdc62e7fa0e9" gracePeriod=30 Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.943765 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6545c458bd-ttzj2" podUID="8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.949390 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=5.949360658 podStartE2EDuration="5.949360658s" podCreationTimestamp="2025-10-03 13:11:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:14.937320016 +0000 UTC m=+1271.147169072" watchObservedRunningTime="2025-10-03 13:11:14.949360658 +0000 UTC m=+1271.159209724" Oct 03 13:11:14 crc kubenswrapper[4868]: I1003 13:11:14.965930 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-64cb6d74d6-hlvzr" podStartSLOduration=2.965900612 podStartE2EDuration="2.965900612s" 
podCreationTimestamp="2025-10-03 13:11:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:14.963162258 +0000 UTC m=+1271.173011324" watchObservedRunningTime="2025-10-03 13:11:14.965900612 +0000 UTC m=+1271.175749678" Oct 03 13:11:15 crc kubenswrapper[4868]: I1003 13:11:15.005404 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.00537582 podStartE2EDuration="6.00537582s" podCreationTimestamp="2025-10-03 13:11:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:14.997440708 +0000 UTC m=+1271.207289784" watchObservedRunningTime="2025-10-03 13:11:15.00537582 +0000 UTC m=+1271.215224906" Oct 03 13:11:15 crc kubenswrapper[4868]: I1003 13:11:15.945398 4868 generic.go:334] "Generic (PLEG): container finished" podID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerID="4dca05bfb01ca7f9ecd049a1d2e8053264b5d698a62996dd6d8bb057e041c5be" exitCode=143 Oct 03 13:11:15 crc kubenswrapper[4868]: I1003 13:11:15.945502 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerDied","Data":"4dca05bfb01ca7f9ecd049a1d2e8053264b5d698a62996dd6d8bb057e041c5be"} Oct 03 13:11:15 crc kubenswrapper[4868]: I1003 13:11:15.947346 4868 generic.go:334] "Generic (PLEG): container finished" podID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerID="bac965ccae4e292caebcb6b71f38dfbe49dc1e6dd63c9a7dfc21270526d2215b" exitCode=143 Oct 03 13:11:15 crc kubenswrapper[4868]: I1003 13:11:15.947430 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerDied","Data":"bac965ccae4e292caebcb6b71f38dfbe49dc1e6dd63c9a7dfc21270526d2215b"} Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.962041 4868 generic.go:334] "Generic (PLEG): container finished" podID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerID="4e2db15fe2e461709585ac14a5369cb8ed80b69ae003027bf4f4fdc62e7fa0e9" exitCode=0 Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.962154 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerDied","Data":"4e2db15fe2e461709585ac14a5369cb8ed80b69ae003027bf4f4fdc62e7fa0e9"} Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.964812 4868 generic.go:334] "Generic (PLEG): container finished" podID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerID="dc90cef984d5095cf81757c7f645ac2535fdda2dade8550fe0ced4068f189f92" exitCode=0 Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.964892 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerDied","Data":"dc90cef984d5095cf81757c7f645ac2535fdda2dade8550fe0ced4068f189f92"} Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.966215 4868 generic.go:334] "Generic (PLEG): container finished" podID="ebf1c326-1c1f-45c6-a9af-f758959b97cf" containerID="4003895d18873ec4c7e75627e60e8bbd003495f765e71d869bbf905e972a8261" exitCode=0 Oct 03 13:11:16 crc kubenswrapper[4868]: I1003 13:11:16.966244 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-db-sync-v2vbf" event={"ID":"ebf1c326-1c1f-45c6-a9af-f758959b97cf","Type":"ContainerDied","Data":"4003895d18873ec4c7e75627e60e8bbd003495f765e71d869bbf905e972a8261"} Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.097653 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.108662 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.220725 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.220926 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.220952 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221007 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221065 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rz5mq\" (UniqueName: \"kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221092 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221112 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221205 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221233 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221248 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\" (UID: \"17fa6af0-60a1-4c36-a301-c7fbe6eb4546\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221283 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221314 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221394 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221428 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs" (OuterVolumeSpecName: "logs") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.221463 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4wfrl\" (UniqueName: \"kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl\") pod \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\" (UID: \"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb\") " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.222437 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.223137 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.223153 4868 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.223275 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs" (OuterVolumeSpecName: "logs") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.223565 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.231358 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.231544 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts" (OuterVolumeSpecName: "scripts") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.231431 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.231690 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl" (OuterVolumeSpecName: "kube-api-access-4wfrl") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "kube-api-access-4wfrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.239134 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq" (OuterVolumeSpecName: "kube-api-access-rz5mq") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "kube-api-access-rz5mq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.245714 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts" (OuterVolumeSpecName: "scripts") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.252773 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.272851 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.283620 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data" (OuterVolumeSpecName: "config-data") pod "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" (UID: "cd7cbd73-e92b-42dc-a86f-ccd67269e2eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.297938 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data" (OuterVolumeSpecName: "config-data") pod "17fa6af0-60a1-4c36-a301-c7fbe6eb4546" (UID: "17fa6af0-60a1-4c36-a301-c7fbe6eb4546"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325568 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325617 4868 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325653 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325668 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325680 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325690 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4wfrl\" (UniqueName: \"kubernetes.io/projected/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-kube-api-access-4wfrl\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325703 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325711 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325721 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325729 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rz5mq\" (UniqueName: \"kubernetes.io/projected/17fa6af0-60a1-4c36-a301-c7fbe6eb4546-kube-api-access-rz5mq\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325738 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.325758 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.366550 4868 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.368839 4868 operation_generator.go:917] UnmountDevice succeeded for 
volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.427278 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.427313 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.978867 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"cd7cbd73-e92b-42dc-a86f-ccd67269e2eb","Type":"ContainerDied","Data":"6993d4483e603ded268bb7c8333d355a832998a37416f139f89743e08d920526"} Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.978940 4868 scope.go:117] "RemoveContainer" containerID="4e2db15fe2e461709585ac14a5369cb8ed80b69ae003027bf4f4fdc62e7fa0e9" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.978955 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.983607 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"17fa6af0-60a1-4c36-a301-c7fbe6eb4546","Type":"ContainerDied","Data":"776325e002e150bb2b00fe246e124a1f379309ff480f78e9c36542a381ecda39"} Oct 03 13:11:17 crc kubenswrapper[4868]: I1003 13:11:17.983700 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.026140 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.048469 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.050540 4868 scope.go:117] "RemoveContainer" containerID="4dca05bfb01ca7f9ecd049a1d2e8053264b5d698a62996dd6d8bb057e041c5be" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.059214 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.074959 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.085993 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: E1003 13:11:18.086697 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.086765 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: E1003 13:11:18.086850 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.086901 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" 
containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: E1003 13:11:18.086971 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087038 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: E1003 13:11:18.087139 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087213 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087440 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087502 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087563 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-log" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.087692 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" containerName="glance-httpd" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.089288 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.092505 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.094317 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-nrct9" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.094554 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.094757 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.095428 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.099753 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.101719 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.104957 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.105232 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.107962 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.224568 4868 scope.go:117] "RemoveContainer" containerID="dc90cef984d5095cf81757c7f645ac2535fdda2dade8550fe0ced4068f189f92" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249171 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249222 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw2hh\" (UniqueName: \"kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249245 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249269 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249285 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249324 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249379 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249401 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249532 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsgqj\" (UniqueName: \"kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249560 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249579 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249602 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249644 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249662 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249677 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.249700 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.284643 4868 scope.go:117] "RemoveContainer" containerID="bac965ccae4e292caebcb6b71f38dfbe49dc1e6dd63c9a7dfc21270526d2215b" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352728 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352792 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352866 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352903 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352926 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw2hh\" (UniqueName: \"kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352954 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.352981 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.353002 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.353120 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.353199 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.354509 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.355201 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.360563 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362564 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362691 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsgqj\" (UniqueName: \"kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362756 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362793 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362840 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.362959 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.363198 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.365762 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.366025 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.366303 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.366343 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.366999 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.368513 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.369203 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 
crc kubenswrapper[4868]: I1003 13:11:18.370180 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.373244 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.385807 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.388123 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw2hh\" (UniqueName: \"kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.389096 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsgqj\" (UniqueName: \"kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.401068 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.441029 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") " pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.499473 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.531605 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.545151 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.567838 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data\") pod \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.568666 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtdxk\" (UniqueName: \"kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk\") pod \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.568775 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle\") pod \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\" (UID: \"ebf1c326-1c1f-45c6-a9af-f758959b97cf\") " Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.573955 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ebf1c326-1c1f-45c6-a9af-f758959b97cf" (UID: "ebf1c326-1c1f-45c6-a9af-f758959b97cf"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.584577 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk" (OuterVolumeSpecName: "kube-api-access-xtdxk") pod "ebf1c326-1c1f-45c6-a9af-f758959b97cf" (UID: "ebf1c326-1c1f-45c6-a9af-f758959b97cf"). InnerVolumeSpecName "kube-api-access-xtdxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.589594 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17fa6af0-60a1-4c36-a301-c7fbe6eb4546" path="/var/lib/kubelet/pods/17fa6af0-60a1-4c36-a301-c7fbe6eb4546/volumes" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.590433 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd7cbd73-e92b-42dc-a86f-ccd67269e2eb" path="/var/lib/kubelet/pods/cd7cbd73-e92b-42dc-a86f-ccd67269e2eb/volumes" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.606581 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ebf1c326-1c1f-45c6-a9af-f758959b97cf" (UID: "ebf1c326-1c1f-45c6-a9af-f758959b97cf"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.674944 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.674991 4868 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ebf1c326-1c1f-45c6-a9af-f758959b97cf-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:18 crc kubenswrapper[4868]: I1003 13:11:18.675002 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtdxk\" (UniqueName: \"kubernetes.io/projected/ebf1c326-1c1f-45c6-a9af-f758959b97cf-kube-api-access-xtdxk\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.003104 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-v2vbf" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.003265 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-v2vbf" event={"ID":"ebf1c326-1c1f-45c6-a9af-f758959b97cf","Type":"ContainerDied","Data":"05b7d2061cde4d0b83d5f5f292be750e0eb5b2ca69565848914e79288c92ddd5"} Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.004396 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="05b7d2061cde4d0b83d5f5f292be750e0eb5b2ca69565848914e79288c92ddd5" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.141205 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.399054 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7dc9986bd6-bfv66"] Oct 03 13:11:19 crc kubenswrapper[4868]: E1003 13:11:19.409321 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebf1c326-1c1f-45c6-a9af-f758959b97cf" containerName="barbican-db-sync" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.409393 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebf1c326-1c1f-45c6-a9af-f758959b97cf" containerName="barbican-db-sync" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.410208 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebf1c326-1c1f-45c6-a9af-f758959b97cf" containerName="barbican-db-sync" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.412550 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.416801 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.417511 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-zhrmp" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.417800 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.448624 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-7f8d674479-d2hdw"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.476825 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.487690 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.508947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-combined-ca-bundle\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.509027 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data-custom\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.521437 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.521717 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-logs\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.521799 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8ssp\" (UniqueName: \"kubernetes.io/projected/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-kube-api-access-j8ssp\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.556608 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7dc9986bd6-bfv66"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.614316 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f8d674479-d2hdw"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.631284 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.631343 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-combined-ca-bundle\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") 
" pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.631423 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-combined-ca-bundle\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.631452 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data-custom\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.631765 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.632137 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-logs\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.632247 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8ssp\" (UniqueName: \"kubernetes.io/projected/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-kube-api-access-j8ssp\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.632335 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-logs\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.632385 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m797j\" (UniqueName: \"kubernetes.io/projected/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-kube-api-access-m797j\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.632519 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data-custom\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.637820 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-logs\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.666507 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data-custom\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.694530 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-config-data\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.700917 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-combined-ca-bundle\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.736600 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.739189 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-logs\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.739246 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m797j\" (UniqueName: \"kubernetes.io/projected/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-kube-api-access-m797j\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.739313 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data-custom\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.739358 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.739395 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-combined-ca-bundle\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: 
\"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.741695 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-logs\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.747937 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.748305 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" containerID="cri-o://724bf2aa921f2fd72c3885e1f2e95d89d05fd2af8a4ff321873e12f3396cd662" gracePeriod=10 Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.751229 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.755977 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8ssp\" (UniqueName: \"kubernetes.io/projected/41df168a-4bf1-44a4-ba5a-2f398a82c8fb-kube-api-access-j8ssp\") pod \"barbican-keystone-listener-7dc9986bd6-bfv66\" (UID: \"41df168a-4bf1-44a4-ba5a-2f398a82c8fb\") " pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.763065 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-combined-ca-bundle\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.770690 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.774228 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data-custom\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.775401 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-config-data\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.784326 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.784655 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.802851 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m797j\" (UniqueName: \"kubernetes.io/projected/9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce-kube-api-access-m797j\") pod \"barbican-worker-7f8d674479-d2hdw\" (UID: \"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce\") " pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.830930 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.869207 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-7f8d674479-d2hdw" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.915585 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.921657 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.932800 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.938107 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"] Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961072 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961744 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961768 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961842 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cvmk\" (UniqueName: \"kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961883 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" 
Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.961916 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:19 crc kubenswrapper[4868]: I1003 13:11:19.992668 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.145:5353: connect: connection refused" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.062429 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerStarted","Data":"a7ebd8f1b62413c8c274b0b626189fe08682431421a95beb63ab5659795515e1"} Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063258 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063405 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063432 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063460 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063498 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063527 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cvmk\" (UniqueName: \"kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063547 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-r4mxp\" (UniqueName: \"kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063572 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063600 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063626 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.063764 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.064670 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.065101 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.065298 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.065765 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.068165 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.087229 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cvmk\" (UniqueName: \"kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk\") pod \"dnsmasq-dns-586bdc5f9-8f94c\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.118302 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdrmw" event={"ID":"ff373899-8e15-4a17-a2dc-ae81859fc44e","Type":"ContainerStarted","Data":"e493069de42bc4c6d847ec08e2b7a4ba442ef271a398b9a66912a2316d7cc0a7"} Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.132973 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerStarted","Data":"1bedad5d2e96705ea29cc670de1fbf37e26b8a9089b0f7c05aea82fe6c2674ef"} Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.165606 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.165720 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.165841 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.165893 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.165936 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4mxp\" (UniqueName: \"kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.182922 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-mdrmw" podStartSLOduration=8.129801276 podStartE2EDuration="55.182893516s" podCreationTimestamp="2025-10-03 13:10:25 +0000 UTC" firstStartedPulling="2025-10-03 13:10:31.000816498 +0000 UTC m=+1227.210665574" 
lastFinishedPulling="2025-10-03 13:11:18.053908748 +0000 UTC m=+1274.263757814" observedRunningTime="2025-10-03 13:11:20.158425191 +0000 UTC m=+1276.368274257" watchObservedRunningTime="2025-10-03 13:11:20.182893516 +0000 UTC m=+1276.392742592" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.186558 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.188716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.189995 4868 generic.go:334] "Generic (PLEG): container finished" podID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerID="724bf2aa921f2fd72c3885e1f2e95d89d05fd2af8a4ff321873e12f3396cd662" exitCode=0 Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.190168 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" event={"ID":"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346","Type":"ContainerDied","Data":"724bf2aa921f2fd72c3885e1f2e95d89d05fd2af8a4ff321873e12f3396cd662"} Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.193541 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.195269 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.202888 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.209629 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4mxp\" (UniqueName: \"kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp\") pod \"barbican-api-5f6c765c74-2kv7n\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.253798 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.514398 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-7f8d674479-d2hdw"] Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.637959 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7dc9986bd6-bfv66"] Oct 03 13:11:20 crc kubenswrapper[4868]: W1003 13:11:20.660590 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod41df168a_4bf1_44a4_ba5a_2f398a82c8fb.slice/crio-c43f4d9cd7a8f8d08606fcc4594b176d8b7ed54056b4864093400f3a43d3e827 WatchSource:0}: Error finding container c43f4d9cd7a8f8d08606fcc4594b176d8b7ed54056b4864093400f3a43d3e827: Status 404 returned error can't find the container with id c43f4d9cd7a8f8d08606fcc4594b176d8b7ed54056b4864093400f3a43d3e827 Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.803172 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"] Oct 03 13:11:20 crc kubenswrapper[4868]: W1003 13:11:20.810763 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcab73c7e_f1c2_413d_b8e9_f61e35ed604b.slice/crio-d14f10a8e4ec95db536ed14472b6482f4efc6d9667067bb5942434bb7eec274a WatchSource:0}: Error finding container d14f10a8e4ec95db536ed14472b6482f4efc6d9667067bb5942434bb7eec274a: Status 404 returned error can't find the container with id d14f10a8e4ec95db536ed14472b6482f4efc6d9667067bb5942434bb7eec274a Oct 03 13:11:20 crc kubenswrapper[4868]: I1003 13:11:20.903867 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"] Oct 03 13:11:20 crc kubenswrapper[4868]: W1003 13:11:20.906402 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b92a9aa_964f_4783_b419_aa3e5776298f.slice/crio-3ff1b4d958ce9174f0442dfe950b6a45cb40d3be3ed69d97f757a20b73137c3f WatchSource:0}: Error finding container 3ff1b4d958ce9174f0442dfe950b6a45cb40d3be3ed69d97f757a20b73137c3f: Status 404 returned error can't find the container with id 3ff1b4d958ce9174f0442dfe950b6a45cb40d3be3ed69d97f757a20b73137c3f Oct 03 13:11:21 crc kubenswrapper[4868]: I1003 13:11:21.202556 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerStarted","Data":"3ff1b4d958ce9174f0442dfe950b6a45cb40d3be3ed69d97f757a20b73137c3f"} Oct 03 13:11:21 crc kubenswrapper[4868]: I1003 13:11:21.204359 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" event={"ID":"cab73c7e-f1c2-413d-b8e9-f61e35ed604b","Type":"ContainerStarted","Data":"d14f10a8e4ec95db536ed14472b6482f4efc6d9667067bb5942434bb7eec274a"} Oct 03 13:11:21 crc kubenswrapper[4868]: I1003 13:11:21.205913 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" event={"ID":"41df168a-4bf1-44a4-ba5a-2f398a82c8fb","Type":"ContainerStarted","Data":"c43f4d9cd7a8f8d08606fcc4594b176d8b7ed54056b4864093400f3a43d3e827"} Oct 03 13:11:21 crc kubenswrapper[4868]: I1003 13:11:21.208137 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8d674479-d2hdw" 
event={"ID":"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce","Type":"ContainerStarted","Data":"95cffcf19eef667360652fb03d472f1e372aef30eb1c5e9c72fc18f50a9e67e3"} Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.247964 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerStarted","Data":"a928878d0716c98ea372d72b83afdbe97d0c7cb59ce6624f8e5b42f3852abba1"} Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.250538 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerStarted","Data":"2ec4200617faae7c8f5af8044528e818cdd7eba9d77ae5c21a542103a65dc26b"} Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.253274 4868 generic.go:334] "Generic (PLEG): container finished" podID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerID="813e8e5b7c6f68af01b52f29e4eed923db753c65b3ce4c92c7e53c28d6742ae0" exitCode=0 Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.253327 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" event={"ID":"cab73c7e-f1c2-413d-b8e9-f61e35ed604b","Type":"ContainerDied","Data":"813e8e5b7c6f68af01b52f29e4eed923db753c65b3ce4c92c7e53c28d6742ae0"} Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.262081 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerStarted","Data":"8c742643c1b94b2d18ca94d96073fa89187103db0d375ac70a0e04bbdcd76d7b"} Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.433243 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-cbcbd76c4-z459l"] Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.437770 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.443521 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.444163 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.461131 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cbcbd76c4-z459l"] Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534204 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-combined-ca-bundle\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534273 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-internal-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534348 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-public-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534416 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534476 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15bc6da3-2507-4029-80fb-fa480d30f199-logs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534500 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data-custom\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.534556 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qslfn\" (UniqueName: \"kubernetes.io/projected/15bc6da3-2507-4029-80fb-fa480d30f199-kube-api-access-qslfn\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636214 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-combined-ca-bundle\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636281 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-internal-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636354 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-public-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636436 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636536 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15bc6da3-2507-4029-80fb-fa480d30f199-logs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636579 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data-custom\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.636618 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qslfn\" (UniqueName: \"kubernetes.io/projected/15bc6da3-2507-4029-80fb-fa480d30f199-kube-api-access-qslfn\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.637832 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/15bc6da3-2507-4029-80fb-fa480d30f199-logs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.644191 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-internal-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.645560 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-combined-ca-bundle\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.646770 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data-custom\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.647334 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-config-data\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.647537 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/15bc6da3-2507-4029-80fb-fa480d30f199-public-tls-certs\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.660919 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qslfn\" (UniqueName: \"kubernetes.io/projected/15bc6da3-2507-4029-80fb-fa480d30f199-kube-api-access-qslfn\") pod \"barbican-api-cbcbd76c4-z459l\" (UID: \"15bc6da3-2507-4029-80fb-fa480d30f199\") " pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:22 crc kubenswrapper[4868]: I1003 13:11:22.784749 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:24 crc kubenswrapper[4868]: I1003 13:11:24.869225 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 03 13:11:24 crc kubenswrapper[4868]: I1003 13:11:24.939746 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-6545c458bd-ttzj2" podUID="8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.143:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.143:8443: connect: connection refused" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.319430 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.321508 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" event={"ID":"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346","Type":"ContainerDied","Data":"d78447735695708130b0bf57bbc075de47b277990d09acde87e5778349607c15"} Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.321574 4868 scope.go:117] "RemoveContainer" containerID="724bf2aa921f2fd72c3885e1f2e95d89d05fd2af8a4ff321873e12f3396cd662" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.431047 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.431246 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.431469 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jsd9\" (UniqueName: \"kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.432024 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.432138 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.432171 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config\") pod \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\" (UID: \"5855fdd4-aa4b-49e9-b138-0c2ba7c7c346\") " Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.451523 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9" (OuterVolumeSpecName: "kube-api-access-5jsd9") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "kube-api-access-5jsd9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.500266 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config" (OuterVolumeSpecName: "config") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.501952 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.511351 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.515376 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.535018 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.535086 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.535101 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.535113 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jsd9\" (UniqueName: \"kubernetes.io/projected/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-kube-api-access-5jsd9\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.535131 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.556213 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" (UID: "5855fdd4-aa4b-49e9-b138-0c2ba7c7c346"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:26 crc kubenswrapper[4868]: I1003 13:11:26.637123 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:27 crc kubenswrapper[4868]: I1003 13:11:27.330573 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" Oct 03 13:11:27 crc kubenswrapper[4868]: I1003 13:11:27.362380 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:27 crc kubenswrapper[4868]: I1003 13:11:27.370810 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-4tw2b"] Oct 03 13:11:28 crc kubenswrapper[4868]: I1003 13:11:28.557250 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" path="/var/lib/kubelet/pods/5855fdd4-aa4b-49e9-b138-0c2ba7c7c346/volumes" Oct 03 13:11:28 crc kubenswrapper[4868]: I1003 13:11:28.805875 4868 scope.go:117] "RemoveContainer" containerID="da49d5b893f722525be0b8ab0c35bf94be1fb05b19232feba526bba6347ebc07" Oct 03 13:11:29 crc kubenswrapper[4868]: I1003 13:11:29.361949 4868 generic.go:334] "Generic (PLEG): container finished" podID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" containerID="8163c9a92feaddd362cb2dfff8aa650a81a4d993a162ff1d953fdcb7b94f1995" exitCode=0 Oct 03 13:11:29 crc kubenswrapper[4868]: I1003 13:11:29.362029 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7f6km" event={"ID":"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b","Type":"ContainerDied","Data":"8163c9a92feaddd362cb2dfff8aa650a81a4d993a162ff1d953fdcb7b94f1995"} Oct 03 13:11:29 crc kubenswrapper[4868]: I1003 13:11:29.888935 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-cbcbd76c4-z459l"] Oct 03 13:11:29 crc kubenswrapper[4868]: W1003 13:11:29.910684 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod15bc6da3_2507_4029_80fb_fa480d30f199.slice/crio-a55787605cc3c0b6cf9e3fc75074fef3ffe5560bbd8d695f8a0a0d1cfade0c07 WatchSource:0}: Error finding container a55787605cc3c0b6cf9e3fc75074fef3ffe5560bbd8d695f8a0a0d1cfade0c07: Status 404 returned error can't find the container with id a55787605cc3c0b6cf9e3fc75074fef3ffe5560bbd8d695f8a0a0d1cfade0c07 Oct 03 13:11:29 crc kubenswrapper[4868]: E1003 13:11:29.943929 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" Oct 03 13:11:29 crc kubenswrapper[4868]: I1003 13:11:29.993460 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-785d8bcb8c-4tw2b" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.145:5353: i/o timeout" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.384375 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerStarted","Data":"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.384558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.384621 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="proxy-httpd" containerID="cri-o://b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df" 
gracePeriod=30 Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.384636 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="ceilometer-notification-agent" containerID="cri-o://8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8" gracePeriod=30 Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.384621 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="sg-core" containerID="cri-o://a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c" gracePeriod=30 Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.390236 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerStarted","Data":"2d4f0f94fad4115cc12f547af93b872acfe38ceead6e181f6743e572015ed01f"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.394755 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cbcbd76c4-z459l" event={"ID":"15bc6da3-2507-4029-80fb-fa480d30f199","Type":"ContainerStarted","Data":"e199f31ff9ca537920a94b5eaebb83db336b6c76ecc81e51741cb4efb5e64891"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.394811 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cbcbd76c4-z459l" event={"ID":"15bc6da3-2507-4029-80fb-fa480d30f199","Type":"ContainerStarted","Data":"a55787605cc3c0b6cf9e3fc75074fef3ffe5560bbd8d695f8a0a0d1cfade0c07"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.409555 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8d674479-d2hdw" event={"ID":"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce","Type":"ContainerStarted","Data":"2bdc921862f383c3f3431f377ebb379142dada91fd93763855b6fdfab26ffaa8"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.410162 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-7f8d674479-d2hdw" event={"ID":"9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce","Type":"ContainerStarted","Data":"da755f02b3781fd14c0a837682309c67030044e88cfc694be503c33703b70866"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.422332 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerStarted","Data":"aa8b26c2a595c05ebde88c41e17d0f99240a45079271babacbf828094f133954"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.423328 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.429386 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" event={"ID":"cab73c7e-f1c2-413d-b8e9-f61e35ed604b","Type":"ContainerStarted","Data":"6ee6f6a2945005220c600e5e00fe092e3ac0edfc59e56bb58444a4bad07a90ad"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.429507 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.433286 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerStarted","Data":"3f1a89735a0e7af92e496d4a81f856bf15e7c9e6b7f2753eddd5b930229582f1"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.439975 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" event={"ID":"41df168a-4bf1-44a4-ba5a-2f398a82c8fb","Type":"ContainerStarted","Data":"21cac3809497f4220506d6a9f3066312f762b193e2adcd9344b8b3e28c5acdc3"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.440028 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" event={"ID":"41df168a-4bf1-44a4-ba5a-2f398a82c8fb","Type":"ContainerStarted","Data":"8360c3252b711a518ad66d9e7b91c2aab95174d1c38153a491184cdd33a08fc8"} Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.480230 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-7f8d674479-d2hdw" podStartSLOduration=2.49429541 podStartE2EDuration="11.480207472s" podCreationTimestamp="2025-10-03 13:11:19 +0000 UTC" firstStartedPulling="2025-10-03 13:11:20.537710198 +0000 UTC m=+1276.747559264" lastFinishedPulling="2025-10-03 13:11:29.52362225 +0000 UTC m=+1285.733471326" observedRunningTime="2025-10-03 13:11:30.441385441 +0000 UTC m=+1286.651234507" watchObservedRunningTime="2025-10-03 13:11:30.480207472 +0000 UTC m=+1286.690056528" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.581212 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=12.581185299 podStartE2EDuration="12.581185299s" podCreationTimestamp="2025-10-03 13:11:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:30.469526475 +0000 UTC m=+1286.679375541" watchObservedRunningTime="2025-10-03 13:11:30.581185299 +0000 UTC m=+1286.791034365" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.628129 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" podStartSLOduration=11.628093506 podStartE2EDuration="11.628093506s" podCreationTimestamp="2025-10-03 13:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:30.50068871 +0000 UTC m=+1286.710537796" watchObservedRunningTime="2025-10-03 13:11:30.628093506 +0000 UTC m=+1286.837942572" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.655757 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5f6c765c74-2kv7n" podStartSLOduration=11.655723857 podStartE2EDuration="11.655723857s" podCreationTimestamp="2025-10-03 13:11:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:30.559643981 +0000 UTC m=+1286.769493057" watchObservedRunningTime="2025-10-03 13:11:30.655723857 +0000 UTC m=+1286.865572913" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.669728 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7dc9986bd6-bfv66" podStartSLOduration=2.807975698 podStartE2EDuration="11.669699881s" podCreationTimestamp="2025-10-03 13:11:19 +0000 UTC" firstStartedPulling="2025-10-03 13:11:20.663037227 +0000 UTC m=+1276.872886293" 
lastFinishedPulling="2025-10-03 13:11:29.52476141 +0000 UTC m=+1285.734610476" observedRunningTime="2025-10-03 13:11:30.594491535 +0000 UTC m=+1286.804340601" watchObservedRunningTime="2025-10-03 13:11:30.669699881 +0000 UTC m=+1286.879548957" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.671309 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=12.671279343 podStartE2EDuration="12.671279343s" podCreationTimestamp="2025-10-03 13:11:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:30.626638947 +0000 UTC m=+1286.836488013" watchObservedRunningTime="2025-10-03 13:11:30.671279343 +0000 UTC m=+1286.881128429" Oct 03 13:11:30 crc kubenswrapper[4868]: I1003 13:11:30.950885 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7f6km" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.057107 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vt6b\" (UniqueName: \"kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b\") pod \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.057204 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle\") pod \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.057354 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts\") pod \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.057442 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data\") pod \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.057507 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs\") pod \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\" (UID: \"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b\") " Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.058541 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs" (OuterVolumeSpecName: "logs") pod "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" (UID: "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.059377 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.063766 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b" (OuterVolumeSpecName: "kube-api-access-4vt6b") pod "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" (UID: "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"). InnerVolumeSpecName "kube-api-access-4vt6b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.066653 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts" (OuterVolumeSpecName: "scripts") pod "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" (UID: "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.093588 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" (UID: "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.111595 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data" (OuterVolumeSpecName: "config-data") pod "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" (UID: "cf1f9cd5-4be0-47d6-a72b-46c83aebb53b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.161970 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vt6b\" (UniqueName: \"kubernetes.io/projected/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-kube-api-access-4vt6b\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.162023 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.162039 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.162070 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.479700 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-7f6km" event={"ID":"cf1f9cd5-4be0-47d6-a72b-46c83aebb53b","Type":"ContainerDied","Data":"0dffdc482b882789f7e688cbdd30874c9999f2de45c84e2e653fec17237b6fbe"} Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.479755 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0dffdc482b882789f7e688cbdd30874c9999f2de45c84e2e653fec17237b6fbe" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.479835 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-7f6km" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499132 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-5786fc7ff8-jg85w"] Oct 03 13:11:31 crc kubenswrapper[4868]: E1003 13:11:31.499644 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="init" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499670 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="init" Oct 03 13:11:31 crc kubenswrapper[4868]: E1003 13:11:31.499694 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" containerName="placement-db-sync" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499703 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" containerName="placement-db-sync" Oct 03 13:11:31 crc kubenswrapper[4868]: E1003 13:11:31.499713 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499729 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499926 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5855fdd4-aa4b-49e9-b138-0c2ba7c7c346" containerName="dnsmasq-dns" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.499948 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" 
containerName="placement-db-sync" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.505892 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.525289 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5786fc7ff8-jg85w"] Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.543673 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.544007 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.544823 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-m5q5x" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.545193 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.546415 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.551646 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-cbcbd76c4-z459l" event={"ID":"15bc6da3-2507-4029-80fb-fa480d30f199","Type":"ContainerStarted","Data":"5377810cd48b93df27114bb14214b3cf766cefe3328334cfe51eaf462c4fdfa7"} Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.551855 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.552146 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-cbcbd76c4-z459l" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.557317 4868 generic.go:334] "Generic (PLEG): container finished" podID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerID="b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df" exitCode=0 Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.557358 4868 generic.go:334] "Generic (PLEG): container finished" podID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerID="a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c" exitCode=2 Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.558434 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerDied","Data":"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df"} Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.558468 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerDied","Data":"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c"} Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.562849 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.604518 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-cbcbd76c4-z459l" podStartSLOduration=9.604497629 podStartE2EDuration="9.604497629s" podCreationTimestamp="2025-10-03 13:11:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:31.603209105 +0000 UTC m=+1287.813058171" watchObservedRunningTime="2025-10-03 13:11:31.604497629 +0000 UTC m=+1287.814346695" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.682338 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-logs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.682894 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-internal-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.683068 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-config-data\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.684018 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-combined-ca-bundle\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.684613 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rc2kx\" (UniqueName: \"kubernetes.io/projected/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-kube-api-access-rc2kx\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.684653 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-public-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.684859 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-scripts\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.786992 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-combined-ca-bundle\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787095 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-rc2kx\" (UniqueName: \"kubernetes.io/projected/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-kube-api-access-rc2kx\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787118 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-public-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787166 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-scripts\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787222 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-logs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787257 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-internal-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.787284 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-config-data\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.788296 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-logs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.796265 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-public-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.796971 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-scripts\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.797029 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-internal-tls-certs\") pod \"placement-5786fc7ff8-jg85w\" (UID: 
\"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.797715 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-combined-ca-bundle\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.811716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-config-data\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.812786 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rc2kx\" (UniqueName: \"kubernetes.io/projected/cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f-kube-api-access-rc2kx\") pod \"placement-5786fc7ff8-jg85w\" (UID: \"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f\") " pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:31 crc kubenswrapper[4868]: I1003 13:11:31.879429 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.205616 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313030 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313192 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313226 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gzxfk\" (UniqueName: \"kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313345 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313389 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313449 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.313490 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd\") pod \"a4226f68-3dce-4cd2-a376-68cec266cd91\" (UID: \"a4226f68-3dce-4cd2-a376-68cec266cd91\") " Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.314162 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.317620 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.317962 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.319921 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts" (OuterVolumeSpecName: "scripts") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.322381 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk" (OuterVolumeSpecName: "kube-api-access-gzxfk") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "kube-api-access-gzxfk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.354158 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.385225 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.412507 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data" (OuterVolumeSpecName: "config-data") pod "a4226f68-3dce-4cd2-a376-68cec266cd91" (UID: "a4226f68-3dce-4cd2-a376-68cec266cd91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419155 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419189 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419205 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a4226f68-3dce-4cd2-a376-68cec266cd91-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419216 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419228 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a4226f68-3dce-4cd2-a376-68cec266cd91-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.419237 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gzxfk\" (UniqueName: \"kubernetes.io/projected/a4226f68-3dce-4cd2-a376-68cec266cd91-kube-api-access-gzxfk\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.525905 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-5786fc7ff8-jg85w"] Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.573066 4868 generic.go:334] "Generic (PLEG): container finished" podID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerID="8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8" exitCode=0 Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.573136 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerDied","Data":"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8"} Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.573167 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a4226f68-3dce-4cd2-a376-68cec266cd91","Type":"ContainerDied","Data":"14963551e9ff9d3a96613b7a6507565ceb738a78ab14c7037d3963815e6f07a2"} Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.573185 4868 scope.go:117] "RemoveContainer" containerID="b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.573416 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.578172 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5786fc7ff8-jg85w" event={"ID":"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f","Type":"ContainerStarted","Data":"781b3051da8d4f6a0d29f8e4d52d7172a084a33202fad336dc187069213251c9"} Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.579010 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.652297 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.670949 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.692473 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.693046 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="proxy-httpd" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693088 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="proxy-httpd" Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.693107 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="ceilometer-notification-agent" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693116 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="ceilometer-notification-agent" Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.693149 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="sg-core" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693158 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="sg-core" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693338 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="ceilometer-notification-agent" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693358 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="sg-core" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.693378 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" containerName="proxy-httpd" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.695486 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.697639 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.699560 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.705906 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.786377 4868 scope.go:117] "RemoveContainer" containerID="a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.811397 4868 scope.go:117] "RemoveContainer" containerID="8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.829362 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.829420 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.829718 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x5d2\" (UniqueName: \"kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.829828 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.829932 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.830207 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.830286 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 
13:11:32.838322 4868 scope.go:117] "RemoveContainer" containerID="b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df" Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.838974 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df\": container with ID starting with b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df not found: ID does not exist" containerID="b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.839031 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df"} err="failed to get container status \"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df\": rpc error: code = NotFound desc = could not find container \"b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df\": container with ID starting with b6544c16f108c89d4ee202f61561e1954373a0e5d1c519cd3aae6039aeb0d4df not found: ID does not exist" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.839079 4868 scope.go:117] "RemoveContainer" containerID="a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c" Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.839549 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c\": container with ID starting with a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c not found: ID does not exist" containerID="a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.839582 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c"} err="failed to get container status \"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c\": rpc error: code = NotFound desc = could not find container \"a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c\": container with ID starting with a1600af40b00cf7c1aca8ddd0d44e7425e8f61d8ef1e9a116261b8368f85e77c not found: ID does not exist" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.839607 4868 scope.go:117] "RemoveContainer" containerID="8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8" Oct 03 13:11:32 crc kubenswrapper[4868]: E1003 13:11:32.839896 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8\": container with ID starting with 8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8 not found: ID does not exist" containerID="8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.839923 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8"} err="failed to get container status \"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8\": rpc error: code = NotFound desc = could not find container \"8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8\": container with ID 
starting with 8b978c620a203f28268a5d9eddc7b0c56c727820814a10399726ff0e845e1df8 not found: ID does not exist" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.850336 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.931920 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x5d2\" (UniqueName: \"kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932343 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932402 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932495 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932529 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932598 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.932630 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.933104 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.933408 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.937265 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.937946 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.938222 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.940010 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:32 crc kubenswrapper[4868]: I1003 13:11:32.959905 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x5d2\" (UniqueName: \"kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2\") pod \"ceilometer-0\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") " pod="openstack/ceilometer-0" Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.027358 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.517009 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:33 crc kubenswrapper[4868]: W1003 13:11:33.521138 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod65fa5327_5088_4f07_9efd_3685ce55a528.slice/crio-f16a9293a43b4082466426a69b2f44e0a0ce881c613a2087867ba8279bf8127d WatchSource:0}: Error finding container f16a9293a43b4082466426a69b2f44e0a0ce881c613a2087867ba8279bf8127d: Status 404 returned error can't find the container with id f16a9293a43b4082466426a69b2f44e0a0ce881c613a2087867ba8279bf8127d Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.589276 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerStarted","Data":"f16a9293a43b4082466426a69b2f44e0a0ce881c613a2087867ba8279bf8127d"} Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.591126 4868 generic.go:334] "Generic (PLEG): container finished" podID="ff373899-8e15-4a17-a2dc-ae81859fc44e" containerID="e493069de42bc4c6d847ec08e2b7a4ba442ef271a398b9a66912a2316d7cc0a7" exitCode=0 Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.591238 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdrmw" event={"ID":"ff373899-8e15-4a17-a2dc-ae81859fc44e","Type":"ContainerDied","Data":"e493069de42bc4c6d847ec08e2b7a4ba442ef271a398b9a66912a2316d7cc0a7"} Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.594345 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5786fc7ff8-jg85w" 
event={"ID":"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f","Type":"ContainerStarted","Data":"cce438819766fe449c79faa8f15100ec81d10fbbba3a1431968adf26658363ff"} Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.594375 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-5786fc7ff8-jg85w" event={"ID":"cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f","Type":"ContainerStarted","Data":"de5d29006b6f9c4ad597550021b67cc591129d1a9f9a444e6f4caa794d768871"} Oct 03 13:11:33 crc kubenswrapper[4868]: I1003 13:11:33.642249 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-5786fc7ff8-jg85w" podStartSLOduration=2.642225731 podStartE2EDuration="2.642225731s" podCreationTimestamp="2025-10-03 13:11:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:33.638214364 +0000 UTC m=+1289.848063430" watchObservedRunningTime="2025-10-03 13:11:33.642225731 +0000 UTC m=+1289.852074797" Oct 03 13:11:34 crc kubenswrapper[4868]: I1003 13:11:34.571158 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4226f68-3dce-4cd2-a376-68cec266cd91" path="/var/lib/kubelet/pods/a4226f68-3dce-4cd2-a376-68cec266cd91/volumes" Oct 03 13:11:34 crc kubenswrapper[4868]: I1003 13:11:34.606366 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerStarted","Data":"95917e28c45d178908b8fe49ce481dd56fedd5af0b372b56f7a4795b9f6d4f30"} Oct 03 13:11:34 crc kubenswrapper[4868]: I1003 13:11:34.606759 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:34 crc kubenswrapper[4868]: I1003 13:11:34.610616 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.049385 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-mdrmw" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191179 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191260 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191452 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191493 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tcwf\" (UniqueName: \"kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191531 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.191580 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id\") pod \"ff373899-8e15-4a17-a2dc-ae81859fc44e\" (UID: \"ff373899-8e15-4a17-a2dc-ae81859fc44e\") " Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.192076 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.209396 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.213316 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.218836 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf" (OuterVolumeSpecName: "kube-api-access-9tcwf") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "kube-api-access-9tcwf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.216991 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts" (OuterVolumeSpecName: "scripts") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.230352 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.295989 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.296041 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tcwf\" (UniqueName: \"kubernetes.io/projected/ff373899-8e15-4a17-a2dc-ae81859fc44e-kube-api-access-9tcwf\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.296073 4868 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.296083 4868 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ff373899-8e15-4a17-a2dc-ae81859fc44e-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.296094 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.299585 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"] Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.299902 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="dnsmasq-dns" containerID="cri-o://aa11c567e72198d8d64aaa240773f0a3029720c508cdfe2c57a6bca1dac3558b" gracePeriod=10 Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.382950 4868 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data" (OuterVolumeSpecName: "config-data") pod "ff373899-8e15-4a17-a2dc-ae81859fc44e" (UID: "ff373899-8e15-4a17-a2dc-ae81859fc44e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.399926 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ff373899-8e15-4a17-a2dc-ae81859fc44e-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.619655 4868 generic.go:334] "Generic (PLEG): container finished" podID="f899ff4a-7f0b-4888-992c-791325ed6746" containerID="aa11c567e72198d8d64aaa240773f0a3029720c508cdfe2c57a6bca1dac3558b" exitCode=0 Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.619724 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" event={"ID":"f899ff4a-7f0b-4888-992c-791325ed6746","Type":"ContainerDied","Data":"aa11c567e72198d8d64aaa240773f0a3029720c508cdfe2c57a6bca1dac3558b"} Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.624031 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerStarted","Data":"2a7be5d6ca6f99210aa1d94cef45cad0864ec83b62a13e75be4890be463ba43f"} Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.633501 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-mdrmw" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.633470 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-mdrmw" event={"ID":"ff373899-8e15-4a17-a2dc-ae81859fc44e","Type":"ContainerDied","Data":"1e0b5545fd0e16a90089b5f6b0b435e97faa5c26a8b51c05787184e921d8e86b"} Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.633565 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1e0b5545fd0e16a90089b5f6b0b435e97faa5c26a8b51c05787184e921d8e86b" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.871878 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:35 crc kubenswrapper[4868]: E1003 13:11:35.872747 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" containerName="cinder-db-sync" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.872768 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" containerName="cinder-db-sync" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.872950 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" containerName="cinder-db-sync" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.874136 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.885546 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.885764 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.886204 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-bgz86" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.886785 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Oct 03 13:11:35 crc kubenswrapper[4868]: I1003 13:11:35.896248 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.011148 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"] Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016331 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016401 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016432 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxfn6\" (UniqueName: \"kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016511 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016529 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.016594 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.017475 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.038957 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"]
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.104911 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124440 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124569 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124695 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124726 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124807 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8qf2v\" (UniqueName: \"kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124881 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124907 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124965 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.124991 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxfn6\" (UniqueName: \"kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.125106 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.125146 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.125171 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.133960 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.140070 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.142084 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.142942 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.161350 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.172698 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxfn6\" (UniqueName: \"kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6\") pod \"cinder-scheduler-0\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.208777 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.230961 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.231077 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.231210 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.231283 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.231371 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4x896\" (UniqueName: \"kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.231660 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config\") pod \"f899ff4a-7f0b-4888-992c-791325ed6746\" (UID: \"f899ff4a-7f0b-4888-992c-791325ed6746\") "
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232218 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232452 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232544 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232682 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232763 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8qf2v\" (UniqueName: \"kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.232890 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.237044 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.237090 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.237790 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.238987 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.240793 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896" (OuterVolumeSpecName: "kube-api-access-4x896") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "kube-api-access-4x896". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.242608 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.276529 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:36 crc kubenswrapper[4868]: E1003 13:11:36.277574 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="dnsmasq-dns"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.277594 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="dnsmasq-dns"
Oct 03 13:11:36 crc kubenswrapper[4868]: E1003 13:11:36.277645 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="init"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.277652 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="init"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.277877 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" containerName="dnsmasq-dns"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.279800 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.297969 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.314605 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.322495 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8qf2v\" (UniqueName: \"kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v\") pod \"dnsmasq-dns-795f4db4bc-542pn\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") " pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.352396 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4x896\" (UniqueName: \"kubernetes.io/projected/f899ff4a-7f0b-4888-992c-791325ed6746-kube-api-access-4x896\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454199 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454257 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454318 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454345 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-924nw\" (UniqueName: \"kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454387 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454468 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.454496 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.522028 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.557983 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.558030 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559235 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559275 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559337 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559358 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-924nw\" (UniqueName: \"kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559400 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.559683 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.560549 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.583396 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.584349 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.630735 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.631935 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.668285 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.676968 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-924nw\" (UniqueName: \"kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw\") pod \"cinder-api-0\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") " pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.677916 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.690452 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.700308 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.700666 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config" (OuterVolumeSpecName: "config") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.711870 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerStarted","Data":"20adf87d55d5cb46fabd0aca71d56766786e667ea51f5978b7790304a0e741af"}
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.723415 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh" event={"ID":"f899ff4a-7f0b-4888-992c-791325ed6746","Type":"ContainerDied","Data":"a7ee712a4493b0e0ca7e093611eb6bb2eb509d2ab0147d3190d939ff9805aa96"}
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.723480 4868 scope.go:117] "RemoveContainer" containerID="aa11c567e72198d8d64aaa240773f0a3029720c508cdfe2c57a6bca1dac3558b"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.723851 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.734855 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f899ff4a-7f0b-4888-992c-791325ed6746" (UID: "f899ff4a-7f0b-4888-992c-791325ed6746"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.766695 4868 scope.go:117] "RemoveContainer" containerID="7da7ec9b8b1f4d6fefbc9658911bd533e8674d052260f768b12b52d5d04e1cd7"
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.780248 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.780314 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.780329 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.781818 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.782034 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f899ff4a-7f0b-4888-992c-791325ed6746-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:36 crc kubenswrapper[4868]: I1003 13:11:36.931361 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.102880 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"]
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.121884 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-58dd9ff6bc-p2pkh"]
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.248139 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"]
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.430713 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:37 crc kubenswrapper[4868]: W1003 13:11:37.435213 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8ff0b93_eb15_4b8c_8466_75a5d641aec7.slice/crio-ae17a25ee2c5d3610a1b52c481422ba713f4ce68192d12da6f0b6dbd8c97a5ba WatchSource:0}: Error finding container ae17a25ee2c5d3610a1b52c481422ba713f4ce68192d12da6f0b6dbd8c97a5ba: Status 404 returned error can't find the container with id ae17a25ee2c5d3610a1b52c481422ba713f4ce68192d12da6f0b6dbd8c97a5ba
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.742923 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerStarted","Data":"cc41a2bae163d8fdaec41413ababf41a8ade0f94bd1d50cc2487765fd4208805"}
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.746773 4868 generic.go:334] "Generic (PLEG): container finished" podID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerID="fe377502de5861f60b56c5d678bbecc371716ec6d45ae2f6f7ae87f563433667" exitCode=0
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.746844 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" event={"ID":"feae4f75-0fab-4d6a-ad00-90c66140d20d","Type":"ContainerDied","Data":"fe377502de5861f60b56c5d678bbecc371716ec6d45ae2f6f7ae87f563433667"}
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.746874 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" event={"ID":"feae4f75-0fab-4d6a-ad00-90c66140d20d","Type":"ContainerStarted","Data":"66f701e4b0346bfcaf69ea9e00355bc146393a44bf4b0d84512606ae26b67a54"}
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.750308 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerStarted","Data":"ae17a25ee2c5d3610a1b52c481422ba713f4ce68192d12da6f0b6dbd8c97a5ba"}
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.764241 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5f6c765c74-2kv7n"
Oct 03 13:11:37 crc kubenswrapper[4868]: I1003 13:11:37.830558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-6545c458bd-ttzj2"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.095531 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-545874b5c8-jnl6d"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.533150 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.536089 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.564717 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f899ff4a-7f0b-4888-992c-791325ed6746" path="/var/lib/kubelet/pods/f899ff4a-7f0b-4888-992c-791325ed6746/volumes"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.565624 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.565693 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.606207 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.624228 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.684754 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.685950 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.798237 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" event={"ID":"feae4f75-0fab-4d6a-ad00-90c66140d20d","Type":"ContainerStarted","Data":"7a4fa0025558174ac3ffd01fde40998eefb52968f067932a847f32ba0da86b4d"}
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.821908 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerStarted","Data":"f2f50ebd2d8572f1a78d7d015032d6d15e38fcd128b69feec11787b9ddd20804"}
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.826922 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerStarted","Data":"2dbbefc6409bcabd623a52e052e5eeb2aeedf526852234cb6b894d39f1210a87"}
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.829285 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.829339 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.829355 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:38 crc kubenswrapper[4868]: I1003 13:11:38.829369 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:39 crc kubenswrapper[4868]: I1003 13:11:39.150448 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:39 crc kubenswrapper[4868]: I1003 13:11:39.873701 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerStarted","Data":"396e814dde3af88b22280b722f9c8b134d099dc87132ecb9b4f2cd0ec789ae1b"}
Oct 03 13:11:39 crc kubenswrapper[4868]: I1003 13:11:39.875971 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:39 crc kubenswrapper[4868]: I1003 13:11:39.943644 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.283527867 podStartE2EDuration="7.943616883s" podCreationTimestamp="2025-10-03 13:11:32 +0000 UTC" firstStartedPulling="2025-10-03 13:11:33.524088065 +0000 UTC m=+1289.733937131" lastFinishedPulling="2025-10-03 13:11:38.184177081 +0000 UTC m=+1294.394026147" observedRunningTime="2025-10-03 13:11:39.934826157 +0000 UTC m=+1296.144675233" watchObservedRunningTime="2025-10-03 13:11:39.943616883 +0000 UTC m=+1296.153465969"
Oct 03 13:11:39 crc kubenswrapper[4868]: I1003 13:11:39.944855 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" podStartSLOduration=4.944847016 podStartE2EDuration="4.944847016s" podCreationTimestamp="2025-10-03 13:11:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:39.901528025 +0000 UTC m=+1296.111377091" watchObservedRunningTime="2025-10-03 13:11:39.944847016 +0000 UTC m=+1296.154696082"
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.887127 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerStarted","Data":"931d65fb2f90c9baadafa1bdbe76dc5ca8716dce3291425ecc633acffd20942f"}
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.889606 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerStarted","Data":"802c75826542378aba425deeee03397d347444782e5362d4b9c8f961aa493e0c"}
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.889780 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.889816 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.889884 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api-log" containerID="cri-o://2dbbefc6409bcabd623a52e052e5eeb2aeedf526852234cb6b894d39f1210a87" gracePeriod=30
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.889936 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api" containerID="cri-o://802c75826542378aba425deeee03397d347444782e5362d4b9c8f961aa493e0c" gracePeriod=30
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.912317 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-545874b5c8-jnl6d"
Oct 03 13:11:40 crc kubenswrapper[4868]: I1003 13:11:40.931646 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.848891995 podStartE2EDuration="5.931609517s" podCreationTimestamp="2025-10-03 13:11:35 +0000 UTC" firstStartedPulling="2025-10-03 13:11:36.970469497 +0000 UTC m=+1293.180318563" lastFinishedPulling="2025-10-03 13:11:38.053187019 +0000 UTC m=+1294.263036085" observedRunningTime="2025-10-03 13:11:40.913978874 +0000 UTC m=+1297.123827950" watchObservedRunningTime="2025-10-03 13:11:40.931609517 +0000 UTC m=+1297.141458583"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.210558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.344030 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-6545c458bd-ttzj2"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.380596 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=5.380572612 podStartE2EDuration="5.380572612s" podCreationTimestamp="2025-10-03 13:11:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:40.987507555 +0000 UTC m=+1297.197356621" watchObservedRunningTime="2025-10-03 13:11:41.380572612 +0000 UTC m=+1297.590421678"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.428193 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"]
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.692842 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.705680 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cbcbd76c4-z459l"
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.939467 4868 generic.go:334] "Generic (PLEG): container finished" podID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerID="802c75826542378aba425deeee03397d347444782e5362d4b9c8f961aa493e0c" exitCode=0
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.939948 4868 generic.go:334] "Generic (PLEG): container finished" podID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerID="2dbbefc6409bcabd623a52e052e5eeb2aeedf526852234cb6b894d39f1210a87" exitCode=143
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.940126 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerDied","Data":"802c75826542378aba425deeee03397d347444782e5362d4b9c8f961aa493e0c"}
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.940168 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerDied","Data":"2dbbefc6409bcabd623a52e052e5eeb2aeedf526852234cb6b894d39f1210a87"}
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.940425 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon-log" containerID="cri-o://6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9" gracePeriod=30
Oct 03 13:11:41 crc kubenswrapper[4868]: I1003 13:11:41.941115 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" containerID="cri-o://88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab" gracePeriod=30
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.104119 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.279863 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.279944 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.280032 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.281248 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-924nw\" (UniqueName: \"kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.281404 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.281458 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.281492 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id\") pod \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\" (UID: \"b8ff0b93-eb15-4b8c-8466-75a5d641aec7\") "
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.282128 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.284387 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs" (OuterVolumeSpecName: "logs") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.286309 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-logs\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.286367 4868 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-etc-machine-id\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.292236 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.297664 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts" (OuterVolumeSpecName: "scripts") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.322399 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw" (OuterVolumeSpecName: "kube-api-access-924nw") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "kube-api-access-924nw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.380425 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.390082 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-cbcbd76c4-z459l"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.399342 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.399380 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.399390 4868 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data-custom\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.399402 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-924nw\" (UniqueName: \"kubernetes.io/projected/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-kube-api-access-924nw\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.418111 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data" (OuterVolumeSpecName: "config-data") pod "b8ff0b93-eb15-4b8c-8466-75a5d641aec7" (UID: "b8ff0b93-eb15-4b8c-8466-75a5d641aec7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.474568 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"]
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.474867 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f6c765c74-2kv7n" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api-log" containerID="cri-o://2ec4200617faae7c8f5af8044528e818cdd7eba9d77ae5c21a542103a65dc26b" gracePeriod=30
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.475356 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-5f6c765c74-2kv7n" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api" containerID="cri-o://aa8b26c2a595c05ebde88c41e17d0f99240a45079271babacbf828094f133954" gracePeriod=30
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.509758 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8ff0b93-eb15-4b8c-8466-75a5d641aec7-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:11:42 crc kubenswrapper[4868]: E1003 13:11:42.776091 4868 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8ff0b93_eb15_4b8c_8466_75a5d641aec7.slice\": RecentStats: unable to find data in memory cache]"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.801234 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.801394 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.953606 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b8ff0b93-eb15-4b8c-8466-75a5d641aec7","Type":"ContainerDied","Data":"ae17a25ee2c5d3610a1b52c481422ba713f4ce68192d12da6f0b6dbd8c97a5ba"}
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.954103 4868 scope.go:117] "RemoveContainer" containerID="802c75826542378aba425deeee03397d347444782e5362d4b9c8f961aa493e0c"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.953678 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.960877 4868 generic.go:334] "Generic (PLEG): container finished" podID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerID="2ec4200617faae7c8f5af8044528e818cdd7eba9d77ae5c21a542103a65dc26b" exitCode=143
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.961882 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerDied","Data":"2ec4200617faae7c8f5af8044528e818cdd7eba9d77ae5c21a542103a65dc26b"}
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.993195 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:42 crc kubenswrapper[4868]: I1003 13:11:42.995455 4868 scope.go:117] "RemoveContainer" containerID="2dbbefc6409bcabd623a52e052e5eeb2aeedf526852234cb6b894d39f1210a87"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.001549 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.021087 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:43 crc kubenswrapper[4868]: E1003 13:11:43.021640 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api-log"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.021661 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api-log"
Oct 03 13:11:43 crc kubenswrapper[4868]: E1003 13:11:43.021675 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.021686 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.021898 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api-log"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.021918 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" containerName="cinder-api"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.022976 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.027291 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.030509 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.030713 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.051120 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121009 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-scripts\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121094 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data-custom\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121148 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/090414a6-8af1-4239-9263-ee2c57b89414-etc-machine-id\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121190 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pf9zs\" (UniqueName: \"kubernetes.io/projected/090414a6-8af1-4239-9263-ee2c57b89414-kube-api-access-pf9zs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121262 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121281 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121303 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/090414a6-8af1-4239-9263-ee2c57b89414-logs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121333 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.121351 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-public-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.222905 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data-custom\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.222998 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/090414a6-8af1-4239-9263-ee2c57b89414-etc-machine-id\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223072 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pf9zs\" (UniqueName: \"kubernetes.io/projected/090414a6-8af1-4239-9263-ee2c57b89414-kube-api-access-pf9zs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223138 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223161 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223183 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/090414a6-8af1-4239-9263-ee2c57b89414-logs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223206 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223227 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-public-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.223268 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-scripts\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.224165 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/090414a6-8af1-4239-9263-ee2c57b89414-etc-machine-id\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.224949 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/090414a6-8af1-4239-9263-ee2c57b89414-logs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.228153 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.229759 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-scripts\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.229777 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.231080 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-public-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.231716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data-custom\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.233673 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.234854 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.245329 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-config-data\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.251446 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pf9zs\" (UniqueName: \"kubernetes.io/projected/090414a6-8af1-4239-9263-ee2c57b89414-kube-api-access-pf9zs\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.254312 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/090414a6-8af1-4239-9263-ee2c57b89414-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"090414a6-8af1-4239-9263-ee2c57b89414\") " pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.343617 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Oct 03 13:11:43 crc kubenswrapper[4868]: I1003 13:11:43.498412 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0"
Oct 03 13:11:44 crc kubenswrapper[4868]: I1003 13:11:44.090147 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Oct 03 13:11:44 crc kubenswrapper[4868]: I1003 13:11:44.566593 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8ff0b93-eb15-4b8c-8466-75a5d641aec7" path="/var/lib/kubelet/pods/b8ff0b93-eb15-4b8c-8466-75a5d641aec7/volumes"
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.027997 4868 generic.go:334] "Generic (PLEG): container finished" podID="29d1fa62-1c44-47f6-9c4c-c9023d4ef342" containerID="5fc7d3eab193e67830007fa455e9ab28deddf6084edd6b36056954ab14d726a5" exitCode=0
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.028108 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8krw5" event={"ID":"29d1fa62-1c44-47f6-9c4c-c9023d4ef342","Type":"ContainerDied","Data":"5fc7d3eab193e67830007fa455e9ab28deddf6084edd6b36056954ab14d726a5"}
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.033658 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"090414a6-8af1-4239-9263-ee2c57b89414","Type":"ContainerStarted","Data":"2300ff26cb2931dcf3655ac0dc697e2a25284a304e2a88a8faaf6617ad4b60ce"}
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.116682 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": read tcp 10.217.0.2:54822->10.217.0.142:8443: read: connection reset by peer"
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.894544 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-64cb6d74d6-hlvzr"
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.908964 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f6c765c74-2kv7n" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:44920->10.217.0.154:9311: read: connection reset by peer"
Oct 03 13:11:45 crc kubenswrapper[4868]: I1003 13:11:45.909865 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-5f6c765c74-2kv7n" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.154:9311/healthcheck\": read tcp 10.217.0.2:44922->10.217.0.154:9311: read: connection reset by peer"
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.075517 4868 generic.go:334] "Generic (PLEG): container finished" podID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerID="88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab" exitCode=0
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.075587 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerDied","Data":"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab"}
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.078538 4868 generic.go:334] "Generic (PLEG): container finished" podID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerID="aa8b26c2a595c05ebde88c41e17d0f99240a45079271babacbf828094f133954" exitCode=0
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.078605 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerDied","Data":"aa8b26c2a595c05ebde88c41e17d0f99240a45079271babacbf828094f133954"}
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.082078 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"090414a6-8af1-4239-9263-ee2c57b89414","Type":"ContainerStarted","Data":"5a62416f85a61a8ee0139abd5910af171084b588b950ae197441541100f28e65"}
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.082123 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"090414a6-8af1-4239-9263-ee2c57b89414","Type":"ContainerStarted","Data":"d4c6002fc31aa8a0a29e03a6dfa4013d3d21dc5f3fd81293b65493e40a5efd8e"}
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.083786 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0"
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.125581 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=4.125551564 podStartE2EDuration="4.125551564s" podCreationTimestamp="2025-10-03 13:11:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:46.109654537 +0000 UTC m=+1302.319503623" watchObservedRunningTime="2025-10-03 13:11:46.125551564 +0000 UTC m=+1302.335400630"
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.524328 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.668607 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"]
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.668885 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="dnsmasq-dns" containerID="cri-o://6ee6f6a2945005220c600e5e00fe092e3ac0edfc59e56bb58444a4bad07a90ad" gracePeriod=10
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.704185 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"]
Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.705570 4868 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.709593 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.709877 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.710002 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-6s25n" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.734609 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.734705 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.734835 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzq46\" (UniqueName: \"kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.734863 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.739259 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.751858 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.766609 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.869253 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzq46\" (UniqueName: \"kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.869329 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.869385 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.869487 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.904677 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.904693 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzq46\" (UniqueName: \"kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.915246 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.929038 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret\") pod \"openstackclient\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " pod="openstack/openstackclient" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.960882 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.970982 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom\") pod \"2b92a9aa-964f-4783-b419-aa3e5776298f\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.971339 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle\") pod \"2b92a9aa-964f-4783-b419-aa3e5776298f\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.971381 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r4mxp\" (UniqueName: \"kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp\") pod \"2b92a9aa-964f-4783-b419-aa3e5776298f\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.971410 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data\") pod \"2b92a9aa-964f-4783-b419-aa3e5776298f\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.971459 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs\") pod \"2b92a9aa-964f-4783-b419-aa3e5776298f\" (UID: \"2b92a9aa-964f-4783-b419-aa3e5776298f\") " Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.978379 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs" (OuterVolumeSpecName: "logs") pod "2b92a9aa-964f-4783-b419-aa3e5776298f" (UID: "2b92a9aa-964f-4783-b419-aa3e5776298f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:11:46 crc kubenswrapper[4868]: I1003 13:11:46.989108 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp" (OuterVolumeSpecName: "kube-api-access-r4mxp") pod "2b92a9aa-964f-4783-b419-aa3e5776298f" (UID: "2b92a9aa-964f-4783-b419-aa3e5776298f"). InnerVolumeSpecName "kube-api-access-r4mxp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:46.999465 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.000496 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.007267 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2b92a9aa-964f-4783-b419-aa3e5776298f" (UID: "2b92a9aa-964f-4783-b419-aa3e5776298f"). InnerVolumeSpecName "config-data-custom". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.021441 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.045503 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:47 crc kubenswrapper[4868]: E1003 13:11:47.046153 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.046172 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api" Oct 03 13:11:47 crc kubenswrapper[4868]: E1003 13:11:47.046187 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api-log" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.046193 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api-log" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.046430 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api-log" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.046451 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" containerName="barbican-api" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.051494 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.060164 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.065052 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data" (OuterVolumeSpecName: "config-data") pod "2b92a9aa-964f-4783-b419-aa3e5776298f" (UID: "2b92a9aa-964f-4783-b419-aa3e5776298f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.076933 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r4mxp\" (UniqueName: \"kubernetes.io/projected/2b92a9aa-964f-4783-b419-aa3e5776298f-kube-api-access-r4mxp\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.076972 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.076984 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2b92a9aa-964f-4783-b419-aa3e5776298f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.076993 4868 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.088423 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b92a9aa-964f-4783-b419-aa3e5776298f" (UID: "2b92a9aa-964f-4783-b419-aa3e5776298f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.105542 4868 generic.go:334] "Generic (PLEG): container finished" podID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerID="6ee6f6a2945005220c600e5e00fe092e3ac0edfc59e56bb58444a4bad07a90ad" exitCode=0 Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.105619 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" event={"ID":"cab73c7e-f1c2-413d-b8e9-f61e35ed604b","Type":"ContainerDied","Data":"6ee6f6a2945005220c600e5e00fe092e3ac0edfc59e56bb58444a4bad07a90ad"} Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.109325 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-8krw5" event={"ID":"29d1fa62-1c44-47f6-9c4c-c9023d4ef342","Type":"ContainerDied","Data":"d779da3b645d00c413ac0d61a8895948835926a9134f41c0195170a5a1e68f66"} Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.109371 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d779da3b645d00c413ac0d61a8895948835926a9134f41c0195170a5a1e68f66" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.116165 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5f6c765c74-2kv7n" event={"ID":"2b92a9aa-964f-4783-b419-aa3e5776298f","Type":"ContainerDied","Data":"3ff1b4d958ce9174f0442dfe950b6a45cb40d3be3ed69d97f757a20b73137c3f"} Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.116183 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5f6c765c74-2kv7n" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.116334 4868 scope.go:117] "RemoveContainer" containerID="aa8b26c2a595c05ebde88c41e17d0f99240a45079271babacbf828094f133954" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.116980 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="probe" containerID="cri-o://931d65fb2f90c9baadafa1bdbe76dc5ca8716dce3291425ecc633acffd20942f" gracePeriod=30 Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.117012 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="cinder-scheduler" containerID="cri-o://396e814dde3af88b22280b722f9c8b134d099dc87132ecb9b4f2cd0ec789ae1b" gracePeriod=30 Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.152213 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8krw5" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.178928 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.179848 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4z8gb\" (UniqueName: \"kubernetes.io/projected/cc1e34f6-e4f8-4287-a809-8f25736927d0-kube-api-access-4z8gb\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.179926 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.179974 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.180037 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b92a9aa-964f-4783-b419-aa3e5776298f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.180123 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"] Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.200829 4868 scope.go:117] "RemoveContainer" containerID="2ec4200617faae7c8f5af8044528e818cdd7eba9d77ae5c21a542103a65dc26b" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.212364 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-5f6c765c74-2kv7n"] Oct 03 13:11:47 crc kubenswrapper[4868]: E1003 13:11:47.251290 4868 
log.go:32] "RunPodSandbox from runtime service failed" err=< Oct 03 13:11:47 crc kubenswrapper[4868]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_d39f17f6-fcb8-49c5-a75a-aa971a2e381c_0(dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8" Netns:"/var/run/netns/45a6cebb-dc40-44dd-9fa3-b5cb2d3f8bec" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8;K8S_POD_UID=d39f17f6-fcb8-49c5-a75a-aa971a2e381c" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/d39f17f6-fcb8-49c5-a75a-aa971a2e381c]: expected pod UID "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" but got "cc1e34f6-e4f8-4287-a809-8f25736927d0" from Kube API Oct 03 13:11:47 crc kubenswrapper[4868]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 03 13:11:47 crc kubenswrapper[4868]: > Oct 03 13:11:47 crc kubenswrapper[4868]: E1003 13:11:47.251374 4868 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err=< Oct 03 13:11:47 crc kubenswrapper[4868]: rpc error: code = Unknown desc = failed to create pod network sandbox k8s_openstackclient_openstack_d39f17f6-fcb8-49c5-a75a-aa971a2e381c_0(dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8): error adding pod openstack_openstackclient to CNI network "multus-cni-network": plugin type="multus-shim" name="multus-cni-network" failed (add): CmdAdd (shim): CNI request failed with status 400: 'ContainerID:"dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8" Netns:"/var/run/netns/45a6cebb-dc40-44dd-9fa3-b5cb2d3f8bec" IfName:"eth0" Args:"IgnoreUnknown=1;K8S_POD_NAMESPACE=openstack;K8S_POD_NAME=openstackclient;K8S_POD_INFRA_CONTAINER_ID=dcbf4db6676b282c6f520cb9c7b2694c606be157fa7c67ff6600a83f7d7f64c8;K8S_POD_UID=d39f17f6-fcb8-49c5-a75a-aa971a2e381c" Path:"" ERRORED: error configuring pod [openstack/openstackclient] networking: Multus: [openstack/openstackclient/d39f17f6-fcb8-49c5-a75a-aa971a2e381c]: expected pod UID "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" but got "cc1e34f6-e4f8-4287-a809-8f25736927d0" from Kube API Oct 03 13:11:47 crc kubenswrapper[4868]: ': StdinData: {"binDir":"/var/lib/cni/bin","clusterNetwork":"/host/run/multus/cni/net.d/10-ovn-kubernetes.conf","cniVersion":"0.3.1","daemonSocketDir":"/run/multus/socket","globalNamespaces":"default,openshift-multus,openshift-sriov-network-operator,openshift-cnv","logLevel":"verbose","logToStderr":true,"name":"multus-cni-network","namespaceIsolation":true,"type":"multus-shim"} Oct 03 13:11:47 crc kubenswrapper[4868]: > pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.282969 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config\") pod \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.283117 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle\") pod \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.283295 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j87tf\" (UniqueName: \"kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf\") pod \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\" (UID: \"29d1fa62-1c44-47f6-9c4c-c9023d4ef342\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.283665 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.283775 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.283981 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4z8gb\" (UniqueName: \"kubernetes.io/projected/cc1e34f6-e4f8-4287-a809-8f25736927d0-kube-api-access-4z8gb\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.284037 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.285878 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.296701 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-openstack-config-secret\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.316932 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cc1e34f6-e4f8-4287-a809-8f25736927d0-combined-ca-bundle\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.318746 4868 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf" (OuterVolumeSpecName: "kube-api-access-j87tf") pod "29d1fa62-1c44-47f6-9c4c-c9023d4ef342" (UID: "29d1fa62-1c44-47f6-9c4c-c9023d4ef342"). InnerVolumeSpecName "kube-api-access-j87tf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.353866 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4z8gb\" (UniqueName: \"kubernetes.io/projected/cc1e34f6-e4f8-4287-a809-8f25736927d0-kube-api-access-4z8gb\") pod \"openstackclient\" (UID: \"cc1e34f6-e4f8-4287-a809-8f25736927d0\") " pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.385734 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j87tf\" (UniqueName: \"kubernetes.io/projected/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-kube-api-access-j87tf\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.393277 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "29d1fa62-1c44-47f6-9c4c-c9023d4ef342" (UID: "29d1fa62-1c44-47f6-9c4c-c9023d4ef342"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.394263 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config" (OuterVolumeSpecName: "config") pod "29d1fa62-1c44-47f6-9c4c-c9023d4ef342" (UID: "29d1fa62-1c44-47f6-9c4c-c9023d4ef342"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.402107 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.487825 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.487949 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488024 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488105 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8cvmk\" (UniqueName: \"kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488168 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488211 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0\") pod \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\" (UID: \"cab73c7e-f1c2-413d-b8e9-f61e35ed604b\") " Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488702 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.488727 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/29d1fa62-1c44-47f6-9c4c-c9023d4ef342-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.506046 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk" (OuterVolumeSpecName: "kube-api-access-8cvmk") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "kube-api-access-8cvmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.527252 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.558035 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.586451 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config" (OuterVolumeSpecName: "config") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.590422 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8cvmk\" (UniqueName: \"kubernetes.io/projected/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-kube-api-access-8cvmk\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.590450 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.590459 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.619202 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.659473 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.674014 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "cab73c7e-f1c2-413d-b8e9-f61e35ed604b" (UID: "cab73c7e-f1c2-413d-b8e9-f61e35ed604b"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.696867 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.696915 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:47 crc kubenswrapper[4868]: I1003 13:11:47.696928 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cab73c7e-f1c2-413d-b8e9-f61e35ed604b-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.088452 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.132493 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" event={"ID":"cab73c7e-f1c2-413d-b8e9-f61e35ed604b","Type":"ContainerDied","Data":"d14f10a8e4ec95db536ed14472b6482f4efc6d9667067bb5942434bb7eec274a"} Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.132590 4868 scope.go:117] "RemoveContainer" containerID="6ee6f6a2945005220c600e5e00fe092e3ac0edfc59e56bb58444a4bad07a90ad" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.132712 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-586bdc5f9-8f94c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.145946 4868 generic.go:334] "Generic (PLEG): container finished" podID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerID="931d65fb2f90c9baadafa1bdbe76dc5ca8716dce3291425ecc633acffd20942f" exitCode=0 Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.146016 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerDied","Data":"931d65fb2f90c9baadafa1bdbe76dc5ca8716dce3291425ecc633acffd20942f"} Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.147364 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"cc1e34f6-e4f8-4287-a809-8f25736927d0","Type":"ContainerStarted","Data":"3e7b0b9fbddc692243c5ca69963bef63ff478c0b2265fc6473a6f93a11178c0a"} Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.149581 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.150331 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-8krw5" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.163846 4868 scope.go:117] "RemoveContainer" containerID="813e8e5b7c6f68af01b52f29e4eed923db753c65b3ce4c92c7e53c28d6742ae0" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.164756 4868 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d39f17f6-fcb8-49c5-a75a-aa971a2e381c" podUID="cc1e34f6-e4f8-4287-a809-8f25736927d0" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.173228 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.188039 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.195572 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-586bdc5f9-8f94c"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.306922 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config\") pod \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.307045 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzq46\" (UniqueName: \"kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46\") pod \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.307305 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle\") pod \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.307342 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret\") pod \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\" (UID: \"d39f17f6-fcb8-49c5-a75a-aa971a2e381c\") " Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.310424 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" (UID: "d39f17f6-fcb8-49c5-a75a-aa971a2e381c"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.313324 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46" (OuterVolumeSpecName: "kube-api-access-fzq46") pod "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" (UID: "d39f17f6-fcb8-49c5-a75a-aa971a2e381c"). InnerVolumeSpecName "kube-api-access-fzq46". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.314806 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" (UID: "d39f17f6-fcb8-49c5-a75a-aa971a2e381c"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.317303 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d39f17f6-fcb8-49c5-a75a-aa971a2e381c" (UID: "d39f17f6-fcb8-49c5-a75a-aa971a2e381c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.411767 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.411808 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.411821 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-openstack-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.411830 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzq46\" (UniqueName: \"kubernetes.io/projected/d39f17f6-fcb8-49c5-a75a-aa971a2e381c-kube-api-access-fzq46\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.429865 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"] Oct 03 13:11:48 crc kubenswrapper[4868]: E1003 13:11:48.430317 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="init" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.430333 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="init" Oct 03 13:11:48 crc kubenswrapper[4868]: E1003 13:11:48.430343 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="dnsmasq-dns" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.430349 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="dnsmasq-dns" Oct 03 13:11:48 crc kubenswrapper[4868]: E1003 13:11:48.430361 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29d1fa62-1c44-47f6-9c4c-c9023d4ef342" containerName="neutron-db-sync" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.430369 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="29d1fa62-1c44-47f6-9c4c-c9023d4ef342" containerName="neutron-db-sync" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.430571 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="29d1fa62-1c44-47f6-9c4c-c9023d4ef342" containerName="neutron-db-sync" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.430595 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" containerName="dnsmasq-dns" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.431679 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.485340 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.514752 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.514860 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.514908 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.515021 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.515122 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.515158 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgdcw\" (UniqueName: \"kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.579703 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2b92a9aa-964f-4783-b419-aa3e5776298f" path="/var/lib/kubelet/pods/2b92a9aa-964f-4783-b419-aa3e5776298f/volumes" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.584219 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cab73c7e-f1c2-413d-b8e9-f61e35ed604b" path="/var/lib/kubelet/pods/cab73c7e-f1c2-413d-b8e9-f61e35ed604b/volumes" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.585597 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d39f17f6-fcb8-49c5-a75a-aa971a2e381c" path="/var/lib/kubelet/pods/d39f17f6-fcb8-49c5-a75a-aa971a2e381c/volumes" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.615879 4868 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.618097 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.616489 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.625002 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.625205 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgdcw\" (UniqueName: \"kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.625318 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.626185 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.626364 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.626890 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.628116 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.629765 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.630642 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.635229 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.635675 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.643623 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-gm7mg" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.643934 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.647463 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.652824 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.657123 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgdcw\" (UniqueName: \"kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw\") pod \"dnsmasq-dns-5c9776ccc5-kbv6c\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") " pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.743334 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m67v\" (UniqueName: \"kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.743428 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.743736 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle\") pod \"neutron-56b4dd546d-pngbh\" (UID: 
\"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.743763 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.743908 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.758794 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.845495 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m67v\" (UniqueName: \"kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.845982 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.851314 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.851681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.851824 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.856842 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.856917 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.857003 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.857973 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:48 crc kubenswrapper[4868]: I1003 13:11:48.872954 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m67v\" (UniqueName: \"kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v\") pod \"neutron-56b4dd546d-pngbh\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.066689 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.240038 4868 generic.go:334] "Generic (PLEG): container finished" podID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerID="396e814dde3af88b22280b722f9c8b134d099dc87132ecb9b4f2cd0ec789ae1b" exitCode=0 Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.240241 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerDied","Data":"396e814dde3af88b22280b722f9c8b134d099dc87132ecb9b4f2cd0ec789ae1b"} Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.240560 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.260870 4868 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="d39f17f6-fcb8-49c5-a75a-aa971a2e381c" podUID="cc1e34f6-e4f8-4287-a809-8f25736927d0" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.261522 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.377742 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.377828 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.377873 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.377909 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qxfn6\" (UniqueName: \"kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.378009 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.378054 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom\") pod \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\" (UID: \"0064d985-a16a-4026-b65e-c2bd7b75ae6d\") " Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.378248 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.378623 4868 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/0064d985-a16a-4026-b65e-c2bd7b75ae6d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.395787 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts" (OuterVolumeSpecName: "scripts") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.395921 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6" (OuterVolumeSpecName: "kube-api-access-qxfn6") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "kube-api-access-qxfn6". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.396039 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.482247 4868 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data-custom\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.482291 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.482302 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qxfn6\" (UniqueName: \"kubernetes.io/projected/0064d985-a16a-4026-b65e-c2bd7b75ae6d-kube-api-access-qxfn6\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.492534 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"] Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.517345 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.554119 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data" (OuterVolumeSpecName: "config-data") pod "0064d985-a16a-4026-b65e-c2bd7b75ae6d" (UID: "0064d985-a16a-4026-b65e-c2bd7b75ae6d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.584503 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.584554 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0064d985-a16a-4026-b65e-c2bd7b75ae6d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:11:49 crc kubenswrapper[4868]: I1003 13:11:49.771153 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.263530 4868 generic.go:334] "Generic (PLEG): container finished" podID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerID="e3e725c7e7082ecfc524026ffedc632fe31dabb13cbea45c89ba3e8c6a5adc3c" exitCode=0 Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.263635 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" event={"ID":"0e7dcbce-9519-46bf-a3b1-154a95f285f3","Type":"ContainerDied","Data":"e3e725c7e7082ecfc524026ffedc632fe31dabb13cbea45c89ba3e8c6a5adc3c"} Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.263668 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" event={"ID":"0e7dcbce-9519-46bf-a3b1-154a95f285f3","Type":"ContainerStarted","Data":"1eb01ee7bd7e93957145656dff64aafcc85311e32d73d0dc6b62f485dffac8d7"} Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.273903 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerStarted","Data":"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d"} Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.274259 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerStarted","Data":"260c10a1ae01d0fef1eea44fee5a4a441f978f3b1b8bd3af78adb25e78e2cfb8"} Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.278936 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"0064d985-a16a-4026-b65e-c2bd7b75ae6d","Type":"ContainerDied","Data":"cc41a2bae163d8fdaec41413ababf41a8ade0f94bd1d50cc2487765fd4208805"} Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.279000 4868 scope.go:117] "RemoveContainer" containerID="931d65fb2f90c9baadafa1bdbe76dc5ca8716dce3291425ecc633acffd20942f" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.279177 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.328937 4868 scope.go:117] "RemoveContainer" containerID="396e814dde3af88b22280b722f9c8b134d099dc87132ecb9b4f2cd0ec789ae1b" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.358178 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.385558 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.407628 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:50 crc kubenswrapper[4868]: E1003 13:11:50.408821 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="cinder-scheduler" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.409058 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="cinder-scheduler" Oct 03 13:11:50 crc kubenswrapper[4868]: E1003 13:11:50.409192 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="probe" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.409267 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="probe" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.409674 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="cinder-scheduler" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.409800 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" containerName="probe" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.411681 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.418531 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.459991 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.507889 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.507945 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.507980 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-scripts\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.508029 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.508107 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c595ad51-042d-4867-9db2-68166545d242-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.508160 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7256r\" (UniqueName: \"kubernetes.io/projected/c595ad51-042d-4867-9db2-68166545d242-kube-api-access-7256r\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.574485 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0064d985-a16a-4026-b65e-c2bd7b75ae6d" path="/var/lib/kubelet/pods/0064d985-a16a-4026-b65e-c2bd7b75ae6d/volumes" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611201 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7256r\" (UniqueName: \"kubernetes.io/projected/c595ad51-042d-4867-9db2-68166545d242-kube-api-access-7256r\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611280 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611308 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611340 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-scripts\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611381 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611429 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c595ad51-042d-4867-9db2-68166545d242-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.611520 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c595ad51-042d-4867-9db2-68166545d242-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.619370 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.619612 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.620568 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-scripts\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.640756 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7256r\" (UniqueName: \"kubernetes.io/projected/c595ad51-042d-4867-9db2-68166545d242-kube-api-access-7256r\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.641107 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c595ad51-042d-4867-9db2-68166545d242-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"c595ad51-042d-4867-9db2-68166545d242\") " pod="openstack/cinder-scheduler-0" Oct 03 13:11:50 crc kubenswrapper[4868]: I1003 13:11:50.780187 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.302991 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" event={"ID":"0e7dcbce-9519-46bf-a3b1-154a95f285f3","Type":"ContainerStarted","Data":"6b88c9421990d1b6b61feb967f45e639b0ae753850fb5b836b62a15967606c3e"} Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.303448 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.306214 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerStarted","Data":"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97"} Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.306359 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.327850 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" podStartSLOduration=3.327792602 podStartE2EDuration="3.327792602s" podCreationTimestamp="2025-10-03 13:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:51.324244888 +0000 UTC m=+1307.534093974" watchObservedRunningTime="2025-10-03 13:11:51.327792602 +0000 UTC m=+1307.537641668" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.369875 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-56b4dd546d-pngbh" podStartSLOduration=3.36984875 podStartE2EDuration="3.36984875s" podCreationTimestamp="2025-10-03 13:11:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:51.35307328 +0000 UTC m=+1307.562922366" watchObservedRunningTime="2025-10-03 13:11:51.36984875 +0000 UTC m=+1307.579697826" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.397429 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Oct 03 13:11:51 crc kubenswrapper[4868]: W1003 13:11:51.400958 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc595ad51_042d_4867_9db2_68166545d242.slice/crio-2c0d71569d0ab86df96da86cdc5cd0eb192b810461bb6ed4cf848c7e60a91662 WatchSource:0}: Error finding container 2c0d71569d0ab86df96da86cdc5cd0eb192b810461bb6ed4cf848c7e60a91662: Status 404 returned error can't find the container with id 2c0d71569d0ab86df96da86cdc5cd0eb192b810461bb6ed4cf848c7e60a91662 Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.699945 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-cfd6858f-9jn5h"] Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.704798 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.736235 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.736364 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.752485 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cfd6858f-9jn5h"] Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.841981 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-combined-ca-bundle\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842044 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-ovndb-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842135 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-public-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842157 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-internal-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842208 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-httpd-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842263 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pvgrx\" (UniqueName: \"kubernetes.io/projected/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-kube-api-access-pvgrx\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.842288 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.948676 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-public-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.949254 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-internal-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.949937 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-httpd-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.950010 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pvgrx\" (UniqueName: \"kubernetes.io/projected/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-kube-api-access-pvgrx\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.950038 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.950128 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-combined-ca-bundle\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.950181 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-ovndb-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.955729 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-public-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.957021 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-combined-ca-bundle\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.959828 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" 
Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.959957 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-ovndb-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.963014 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-httpd-config\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.973930 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-internal-tls-certs\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:51 crc kubenswrapper[4868]: I1003 13:11:51.974827 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pvgrx\" (UniqueName: \"kubernetes.io/projected/2036b8f4-7d6a-46eb-9eb8-3d9827c878be-kube-api-access-pvgrx\") pod \"neutron-cfd6858f-9jn5h\" (UID: \"2036b8f4-7d6a-46eb-9eb8-3d9827c878be\") " pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:52 crc kubenswrapper[4868]: I1003 13:11:52.082902 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:52 crc kubenswrapper[4868]: I1003 13:11:52.369045 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c595ad51-042d-4867-9db2-68166545d242","Type":"ContainerStarted","Data":"3ddd8608d8a1371a389d854c6500635ead3a6230af30ec8ef03424cd7eb200b8"} Oct 03 13:11:52 crc kubenswrapper[4868]: I1003 13:11:52.369135 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c595ad51-042d-4867-9db2-68166545d242","Type":"ContainerStarted","Data":"2c0d71569d0ab86df96da86cdc5cd0eb192b810461bb6ed4cf848c7e60a91662"} Oct 03 13:11:52 crc kubenswrapper[4868]: I1003 13:11:52.712344 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-cfd6858f-9jn5h"] Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.216447 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-75c4dd668c-xqjsj"] Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.228739 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.240616 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.240866 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.241075 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.253761 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-75c4dd668c-xqjsj"] Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.283692 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-run-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.283803 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-log-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.283870 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-public-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.283911 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-config-data\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.283947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-etc-swift\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.284007 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-combined-ca-bundle\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.284127 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-internal-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " 
pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.284184 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vzmxj\" (UniqueName: \"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-kube-api-access-vzmxj\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388104 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-run-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388145 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-log-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388207 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-public-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388245 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-config-data\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388268 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-etc-swift\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388334 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-combined-ca-bundle\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388359 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-internal-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.388383 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vzmxj\" (UniqueName: \"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-kube-api-access-vzmxj\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " 
pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.389449 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-run-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.389676 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-log-httpd\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.407437 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-config-data\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.417270 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-combined-ca-bundle\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.417977 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-public-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.424014 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-internal-tls-certs\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.424490 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-etc-swift\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.429392 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cfd6858f-9jn5h" event={"ID":"2036b8f4-7d6a-46eb-9eb8-3d9827c878be","Type":"ContainerStarted","Data":"8542444a4030425b90b9079ce55e3cda78ecaaf4220c4778b6791bb4d626e7fb"} Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.429460 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cfd6858f-9jn5h" event={"ID":"2036b8f4-7d6a-46eb-9eb8-3d9827c878be","Type":"ContainerStarted","Data":"ec52ecd2c98265bcbb6437fe823a45632074c51d810e2158f0051486dc2ae1d7"} Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.437622 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vzmxj\" (UniqueName: 
\"kubernetes.io/projected/18a5e2cd-7517-4ef9-ab47-f4236b4bb836-kube-api-access-vzmxj\") pod \"swift-proxy-75c4dd668c-xqjsj\" (UID: \"18a5e2cd-7517-4ef9-ab47-f4236b4bb836\") " pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:53 crc kubenswrapper[4868]: I1003 13:11:53.700836 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.398711 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-75c4dd668c-xqjsj"] Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.446151 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"c595ad51-042d-4867-9db2-68166545d242","Type":"ContainerStarted","Data":"5e52972be4d3f7ed48578437b83f824d2825a474acefea4d7d4721f793fd05bb"} Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.452135 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-75c4dd668c-xqjsj" event={"ID":"18a5e2cd-7517-4ef9-ab47-f4236b4bb836","Type":"ContainerStarted","Data":"dd79285ece0b53ea72fb726e296806ca4f9d6aa21314cad6d42a634d3ac1279c"} Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.455473 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-cfd6858f-9jn5h" event={"ID":"2036b8f4-7d6a-46eb-9eb8-3d9827c878be","Type":"ContainerStarted","Data":"e7d5255bfa2aae7ed869c9a26f936f466761a048844de3c0f7ff3ea14191786c"} Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.455704 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.476379 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.476359562 podStartE2EDuration="4.476359562s" podCreationTimestamp="2025-10-03 13:11:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:54.47330837 +0000 UTC m=+1310.683157436" watchObservedRunningTime="2025-10-03 13:11:54.476359562 +0000 UTC m=+1310.686208628" Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.561671 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-cfd6858f-9jn5h" podStartSLOduration=3.561640458 podStartE2EDuration="3.561640458s" podCreationTimestamp="2025-10-03 13:11:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:54.510377963 +0000 UTC m=+1310.720227029" watchObservedRunningTime="2025-10-03 13:11:54.561640458 +0000 UTC m=+1310.771489524" Oct 03 13:11:54 crc kubenswrapper[4868]: I1003 13:11:54.867975 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused" Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 13:11:55.491386 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-75c4dd668c-xqjsj" event={"ID":"18a5e2cd-7517-4ef9-ab47-f4236b4bb836","Type":"ContainerStarted","Data":"d85403af3138df04cd0c990995c0d483485de38547f2f7278645bd5b64c5fb75"} Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 
13:11:55.492088 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-75c4dd668c-xqjsj" event={"ID":"18a5e2cd-7517-4ef9-ab47-f4236b4bb836","Type":"ContainerStarted","Data":"a2621f634dca5b1d88b88f45299895b46900b7e5efad5abd5e2d9f4f1a0e6bd7"} Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 13:11:55.493598 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 13:11:55.493630 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 13:11:55.534839 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-75c4dd668c-xqjsj" podStartSLOduration=2.534819694 podStartE2EDuration="2.534819694s" podCreationTimestamp="2025-10-03 13:11:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:11:55.526858651 +0000 UTC m=+1311.736707727" watchObservedRunningTime="2025-10-03 13:11:55.534819694 +0000 UTC m=+1311.744668760" Oct 03 13:11:55 crc kubenswrapper[4868]: I1003 13:11:55.781099 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.370781 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.627231 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.627666 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-central-agent" containerID="cri-o://95917e28c45d178908b8fe49ce481dd56fedd5af0b372b56f7a4795b9f6d4f30" gracePeriod=30 Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.629035 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" containerID="cri-o://f2f50ebd2d8572f1a78d7d015032d6d15e38fcd128b69feec11787b9ddd20804" gracePeriod=30 Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.629153 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="sg-core" containerID="cri-o://20adf87d55d5cb46fabd0aca71d56766786e667ea51f5978b7790304a0e741af" gracePeriod=30 Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.629210 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-notification-agent" containerID="cri-o://2a7be5d6ca6f99210aa1d94cef45cad0864ec83b62a13e75be4890be463ba43f" gracePeriod=30 Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.629282 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:11:56 crc kubenswrapper[4868]: I1003 13:11:56.685328 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": EOF" Oct 03 13:11:57 crc 
kubenswrapper[4868]: I1003 13:11:57.525369 4868 generic.go:334] "Generic (PLEG): container finished" podID="65fa5327-5088-4f07-9efd-3685ce55a528" containerID="f2f50ebd2d8572f1a78d7d015032d6d15e38fcd128b69feec11787b9ddd20804" exitCode=0 Oct 03 13:11:57 crc kubenswrapper[4868]: I1003 13:11:57.525410 4868 generic.go:334] "Generic (PLEG): container finished" podID="65fa5327-5088-4f07-9efd-3685ce55a528" containerID="20adf87d55d5cb46fabd0aca71d56766786e667ea51f5978b7790304a0e741af" exitCode=2 Oct 03 13:11:57 crc kubenswrapper[4868]: I1003 13:11:57.525417 4868 generic.go:334] "Generic (PLEG): container finished" podID="65fa5327-5088-4f07-9efd-3685ce55a528" containerID="95917e28c45d178908b8fe49ce481dd56fedd5af0b372b56f7a4795b9f6d4f30" exitCode=0 Oct 03 13:11:57 crc kubenswrapper[4868]: I1003 13:11:57.525500 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerDied","Data":"f2f50ebd2d8572f1a78d7d015032d6d15e38fcd128b69feec11787b9ddd20804"} Oct 03 13:11:57 crc kubenswrapper[4868]: I1003 13:11:57.525583 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerDied","Data":"20adf87d55d5cb46fabd0aca71d56766786e667ea51f5978b7790304a0e741af"} Oct 03 13:11:57 crc kubenswrapper[4868]: I1003 13:11:57.525599 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerDied","Data":"95917e28c45d178908b8fe49ce481dd56fedd5af0b372b56f7a4795b9f6d4f30"} Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.761234 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.849607 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"] Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.849896 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="dnsmasq-dns" containerID="cri-o://7a4fa0025558174ac3ffd01fde40998eefb52968f067932a847f32ba0da86b4d" gracePeriod=10 Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.957994 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-wgb4q"] Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.959547 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:11:58 crc kubenswrapper[4868]: I1003 13:11:58.990615 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wgb4q"] Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.053866 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25p2w\" (UniqueName: \"kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w\") pod \"nova-api-db-create-wgb4q\" (UID: \"d96bdf5a-e79e-4746-aa96-32043e17fcb4\") " pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.057904 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-2rfdh"] Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.060170 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.074418 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2rfdh"] Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.157605 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25p2w\" (UniqueName: \"kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w\") pod \"nova-api-db-create-wgb4q\" (UID: \"d96bdf5a-e79e-4746-aa96-32043e17fcb4\") " pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.157769 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8ldj\" (UniqueName: \"kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj\") pod \"nova-cell0-db-create-2rfdh\" (UID: \"014e693d-4546-4fdf-b800-2d4263f9aedf\") " pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.183480 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25p2w\" (UniqueName: \"kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w\") pod \"nova-api-db-create-wgb4q\" (UID: \"d96bdf5a-e79e-4746-aa96-32043e17fcb4\") " pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.264705 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8ldj\" (UniqueName: \"kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj\") pod \"nova-cell0-db-create-2rfdh\" (UID: \"014e693d-4546-4fdf-b800-2d4263f9aedf\") " pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.282641 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-vv67p"] Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.284015 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.286611 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vv67p"] Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.304043 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.360800 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8ldj\" (UniqueName: \"kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj\") pod \"nova-cell0-db-create-2rfdh\" (UID: \"014e693d-4546-4fdf-b800-2d4263f9aedf\") " pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.366738 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gms5k\" (UniqueName: \"kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k\") pod \"nova-cell1-db-create-vv67p\" (UID: \"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba\") " pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.400031 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.468569 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gms5k\" (UniqueName: \"kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k\") pod \"nova-cell1-db-create-vv67p\" (UID: \"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba\") " pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.502847 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gms5k\" (UniqueName: \"kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k\") pod \"nova-cell1-db-create-vv67p\" (UID: \"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba\") " pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.560295 4868 generic.go:334] "Generic (PLEG): container finished" podID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerID="7a4fa0025558174ac3ffd01fde40998eefb52968f067932a847f32ba0da86b4d" exitCode=0 Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.560350 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" event={"ID":"feae4f75-0fab-4d6a-ad00-90c66140d20d","Type":"ContainerDied","Data":"7a4fa0025558174ac3ffd01fde40998eefb52968f067932a847f32ba0da86b4d"} Oct 03 13:11:59 crc kubenswrapper[4868]: I1003 13:11:59.614142 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.088884 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.329883 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.330190 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-log" containerID="cri-o://a928878d0716c98ea372d72b83afdbe97d0c7cb59ce6624f8e5b42f3852abba1" gracePeriod=30 Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.330885 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-httpd" containerID="cri-o://2d4f0f94fad4115cc12f547af93b872acfe38ceead6e181f6743e572015ed01f" gracePeriod=30 Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.524907 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.159:5353: connect: connection refused" Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.618497 4868 generic.go:334] "Generic (PLEG): container finished" podID="08fd5302-f657-4b15-ae77-4bac04d778da" containerID="a928878d0716c98ea372d72b83afdbe97d0c7cb59ce6624f8e5b42f3852abba1" exitCode=143 Oct 03 13:12:01 crc kubenswrapper[4868]: I1003 13:12:01.618589 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" 
event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerDied","Data":"a928878d0716c98ea372d72b83afdbe97d0c7cb59ce6624f8e5b42f3852abba1"} Oct 03 13:12:02 crc kubenswrapper[4868]: I1003 13:12:02.647363 4868 generic.go:334] "Generic (PLEG): container finished" podID="65fa5327-5088-4f07-9efd-3685ce55a528" containerID="2a7be5d6ca6f99210aa1d94cef45cad0864ec83b62a13e75be4890be463ba43f" exitCode=0 Oct 03 13:12:02 crc kubenswrapper[4868]: I1003 13:12:02.647415 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerDied","Data":"2a7be5d6ca6f99210aa1d94cef45cad0864ec83b62a13e75be4890be463ba43f"} Oct 03 13:12:03 crc kubenswrapper[4868]: I1003 13:12:03.029206 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.157:3000/\": dial tcp 10.217.0.157:3000: connect: connection refused" Oct 03 13:12:03 crc kubenswrapper[4868]: I1003 13:12:03.711735 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:12:03 crc kubenswrapper[4868]: I1003 13:12:03.719920 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-75c4dd668c-xqjsj" Oct 03 13:12:03 crc kubenswrapper[4868]: I1003 13:12:03.880065 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:12:03 crc kubenswrapper[4868]: I1003 13:12:03.909781 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-5786fc7ff8-jg85w" Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.306002 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.307235 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-httpd" containerID="cri-o://3f1a89735a0e7af92e496d4a81f856bf15e7c9e6b7f2753eddd5b930229582f1" gracePeriod=30 Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.307635 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-log" containerID="cri-o://8c742643c1b94b2d18ca94d96073fa89187103db0d375ac70a0e04bbdcd76d7b" gracePeriod=30 Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.665546 4868 generic.go:334] "Generic (PLEG): container finished" podID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerID="8c742643c1b94b2d18ca94d96073fa89187103db0d375ac70a0e04bbdcd76d7b" exitCode=143 Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.665636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerDied","Data":"8c742643c1b94b2d18ca94d96073fa89187103db0d375ac70a0e04bbdcd76d7b"} Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.668355 4868 generic.go:334] "Generic (PLEG): container finished" podID="08fd5302-f657-4b15-ae77-4bac04d778da" containerID="2d4f0f94fad4115cc12f547af93b872acfe38ceead6e181f6743e572015ed01f" exitCode=0 Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.669376 4868 
Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.669376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerDied","Data":"2d4f0f94fad4115cc12f547af93b872acfe38ceead6e181f6743e572015ed01f"}
Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.868559 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-545874b5c8-jnl6d" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.142:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.142:8443: connect: connection refused"
Oct 03 13:12:04 crc kubenswrapper[4868]: I1003 13:12:04.868853 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-545874b5c8-jnl6d"
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.913218 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938043 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938461 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938511 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6x5d2\" (UniqueName: \"kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938635 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938656 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938721 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938751 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd\") pod \"65fa5327-5088-4f07-9efd-3685ce55a528\" (UID: \"65fa5327-5088-4f07-9efd-3685ce55a528\") "
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.938836 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.939283 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.940542 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.949156 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts" (OuterVolumeSpecName: "scripts") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.950270 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2" (OuterVolumeSpecName: "kube-api-access-6x5d2") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "kube-api-access-6x5d2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:12:05 crc kubenswrapper[4868]: I1003 13:12:05.977003 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.041582 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/65fa5327-5088-4f07-9efd-3685ce55a528-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.041764 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6x5d2\" (UniqueName: \"kubernetes.io/projected/65fa5327-5088-4f07-9efd-3685ce55a528-kube-api-access-6x5d2\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.041779 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.041789 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.084321 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.113925 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data" (OuterVolumeSpecName: "config-data") pod "65fa5327-5088-4f07-9efd-3685ce55a528" (UID: "65fa5327-5088-4f07-9efd-3685ce55a528"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.143903 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.143945 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/65fa5327-5088-4f07-9efd-3685ce55a528-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.203456 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.207376 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-542pn"
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247190 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247263 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247303 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247346 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247412 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247437 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247471 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247531 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247592 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247647 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247674 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw2hh\" (UniqueName: \"kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247714 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle\") pod \"08fd5302-f657-4b15-ae77-4bac04d778da\" (UID: \"08fd5302-f657-4b15-ae77-4bac04d778da\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247818 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8qf2v\" (UniqueName: \"kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.247855 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb\") pod \"feae4f75-0fab-4d6a-ad00-90c66140d20d\" (UID: \"feae4f75-0fab-4d6a-ad00-90c66140d20d\") "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.248187 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.248835 4868 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-httpd-run\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.249759 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs" (OuterVolumeSpecName: "logs") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.252504 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts" (OuterVolumeSpecName: "scripts") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.253223 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.256677 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v" (OuterVolumeSpecName: "kube-api-access-8qf2v") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "kube-api-access-8qf2v". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.261830 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh" (OuterVolumeSpecName: "kube-api-access-nw2hh") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "kube-api-access-nw2hh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.354167 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8qf2v\" (UniqueName: \"kubernetes.io/projected/feae4f75-0fab-4d6a-ad00-90c66140d20d-kube-api-access-8qf2v\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.354241 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" "
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.354256 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.354272 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/08fd5302-f657-4b15-ae77-4bac04d778da-logs\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.354285 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw2hh\" (UniqueName: \"kubernetes.io/projected/08fd5302-f657-4b15-ae77-4bac04d778da-kube-api-access-nw2hh\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.363980 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config" (OuterVolumeSpecName: "config") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.380274 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data" (OuterVolumeSpecName: "config-data") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.382158 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.389869 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.403879 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.406455 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.415575 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "feae4f75-0fab-4d6a-ad00-90c66140d20d" (UID: "feae4f75-0fab-4d6a-ad00-90c66140d20d"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.422004 4868 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc"
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.428589 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "08fd5302-f657-4b15-ae77-4bac04d778da" (UID: "08fd5302-f657-4b15-ae77-4bac04d778da"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455298 4868 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-public-tls-certs\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455342 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455368 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455381 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455396 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455408 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455419 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/08fd5302-f657-4b15-ae77-4bac04d778da-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455430 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.455442 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/feae4f75-0fab-4d6a-ad00-90c66140d20d-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.682315 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-2rfdh"]
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.692150 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-wgb4q"]
Oct 03 13:12:06 crc kubenswrapper[4868]: W1003 13:12:06.698740 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod014e693d_4546_4fdf_b800_2d4263f9aedf.slice/crio-d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea WatchSource:0}: Error finding container d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea: Status 404 returned error can't find the container with id d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea
Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.702984 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-vv67p"]
for pod" pod="openstack/nova-cell1-db-create-vv67p" event={"ID":"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba","Type":"ContainerStarted","Data":"8895cdd788f6e04874b1b1a88cd3c6b95598ca6b5b126b540a52cdc30fe47b9c"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.755821 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wgb4q" event={"ID":"d96bdf5a-e79e-4746-aa96-32043e17fcb4","Type":"ContainerStarted","Data":"ee624042310730b154ef200279fa9dbe552a517a39ccda6907ce1c6f8b697f0c"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.757770 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2rfdh" event={"ID":"014e693d-4546-4fdf-b800-2d4263f9aedf","Type":"ContainerStarted","Data":"d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.772204 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.773174 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-795f4db4bc-542pn" event={"ID":"feae4f75-0fab-4d6a-ad00-90c66140d20d","Type":"ContainerDied","Data":"66f701e4b0346bfcaf69ea9e00355bc146393a44bf4b0d84512606ae26b67a54"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.773307 4868 scope.go:117] "RemoveContainer" containerID="7a4fa0025558174ac3ffd01fde40998eefb52968f067932a847f32ba0da86b4d" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.791592 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"cc1e34f6-e4f8-4287-a809-8f25736927d0","Type":"ContainerStarted","Data":"dd51bcaa9ed5e0e36ed3d9209d4b2f75da18ba4aea63ee3a7cb94f1b0d07723e"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.803962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"65fa5327-5088-4f07-9efd-3685ce55a528","Type":"ContainerDied","Data":"f16a9293a43b4082466426a69b2f44e0a0ce881c613a2087867ba8279bf8127d"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.804226 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.827760 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"08fd5302-f657-4b15-ae77-4bac04d778da","Type":"ContainerDied","Data":"1bedad5d2e96705ea29cc670de1fbf37e26b8a9089b0f7c05aea82fe6c2674ef"} Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.827904 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.859067 4868 scope.go:117] "RemoveContainer" containerID="fe377502de5861f60b56c5d678bbecc371716ec6d45ae2f6f7ae87f563433667" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.881486 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.367988479 podStartE2EDuration="19.881460856s" podCreationTimestamp="2025-10-03 13:11:47 +0000 UTC" firstStartedPulling="2025-10-03 13:11:48.099854386 +0000 UTC m=+1304.309703452" lastFinishedPulling="2025-10-03 13:12:05.613326763 +0000 UTC m=+1321.823175829" observedRunningTime="2025-10-03 13:12:06.826214685 +0000 UTC m=+1323.036063751" watchObservedRunningTime="2025-10-03 13:12:06.881460856 +0000 UTC m=+1323.091309922" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.893030 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.911640 4868 scope.go:117] "RemoveContainer" containerID="f2f50ebd2d8572f1a78d7d015032d6d15e38fcd128b69feec11787b9ddd20804" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.913874 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-795f4db4bc-542pn"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.926647 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.934005 4868 scope.go:117] "RemoveContainer" containerID="20adf87d55d5cb46fabd0aca71d56766786e667ea51f5978b7790304a0e741af" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.951394 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.968982 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.979032 4868 scope.go:117] "RemoveContainer" containerID="2a7be5d6ca6f99210aa1d94cef45cad0864ec83b62a13e75be4890be463ba43f" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.979512 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.988685 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991281 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="init" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991307 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="init" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991322 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-log" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991328 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-log" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991348 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-notification-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991355 4868 
state_mem.go:107] "Deleted CPUSet assignment" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-notification-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991363 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="dnsmasq-dns" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991369 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="dnsmasq-dns" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991384 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-central-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991390 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-central-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991404 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991411 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991429 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="sg-core" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991436 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="sg-core" Oct 03 13:12:06 crc kubenswrapper[4868]: E1003 13:12:06.991441 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991448 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991672 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991696 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-notification-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991708 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" containerName="glance-log" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991723 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="proxy-httpd" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991736 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="ceilometer-central-agent" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991752 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" containerName="dnsmasq-dns" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.991763 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" containerName="sg-core" Oct 03 13:12:06 crc 
kubenswrapper[4868]: I1003 13:12:06.995292 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:06 crc kubenswrapper[4868]: I1003 13:12:06.997420 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:06.999503 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:06.999592 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.000126 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.008331 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.009336 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.009396 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.026269 4868 scope.go:117] "RemoveContainer" containerID="95917e28c45d178908b8fe49ce481dd56fedd5af0b372b56f7a4795b9f6d4f30" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.027410 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.063875 4868 scope.go:117] "RemoveContainer" containerID="2d4f0f94fad4115cc12f547af93b872acfe38ceead6e181f6743e572015ed01f" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.090751 4868 scope.go:117] "RemoveContainer" containerID="a928878d0716c98ea372d72b83afdbe97d0c7cb59ce6624f8e5b42f3852abba1" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178576 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178664 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178699 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-logs\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178729 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: 
\"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178770 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178799 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178829 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b47vf\" (UniqueName: \"kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178849 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178892 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178938 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178970 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9msg2\" (UniqueName: \"kubernetes.io/projected/847afc40-b6d8-4b55-9101-11d808ae4961-kube-api-access-9msg2\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.178986 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-scripts\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.179007 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " 
pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.179030 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-config-data\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.179048 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.280839 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-config-data\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.280888 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.280932 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.280959 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.280984 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-logs\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281001 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281032 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281074 4868 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281106 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b47vf\" (UniqueName: \"kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281130 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281173 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281246 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281280 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9msg2\" (UniqueName: \"kubernetes.io/projected/847afc40-b6d8-4b55-9101-11d808ae4961-kube-api-access-9msg2\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281295 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-scripts\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281316 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.281697 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.282020 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 
13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.282107 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.282761 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-logs\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.282785 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/847afc40-b6d8-4b55-9101-11d808ae4961-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.287187 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.287331 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-config-data\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.287880 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.297921 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.299579 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-scripts\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.300327 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.301511 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/847afc40-b6d8-4b55-9101-11d808ae4961-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: 
\"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.301830 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.306550 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b47vf\" (UniqueName: \"kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf\") pod \"ceilometer-0\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") " pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.306667 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9msg2\" (UniqueName: \"kubernetes.io/projected/847afc40-b6d8-4b55-9101-11d808ae4961-kube-api-access-9msg2\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.331754 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"847afc40-b6d8-4b55-9101-11d808ae4961\") " pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.337858 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.355425 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.844090 4868 generic.go:334] "Generic (PLEG): container finished" podID="d96bdf5a-e79e-4746-aa96-32043e17fcb4" containerID="96e367c82b3c051b5c49a603b1b287a7a91e185d415e2d301909b76b69139628" exitCode=0 Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.844168 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wgb4q" event={"ID":"d96bdf5a-e79e-4746-aa96-32043e17fcb4","Type":"ContainerDied","Data":"96e367c82b3c051b5c49a603b1b287a7a91e185d415e2d301909b76b69139628"} Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.847226 4868 generic.go:334] "Generic (PLEG): container finished" podID="014e693d-4546-4fdf-b800-2d4263f9aedf" containerID="36e74777225a08ea4c362b6de6751b600614c7b59ed6e55d102e121da56490a5" exitCode=0 Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.847275 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2rfdh" event={"ID":"014e693d-4546-4fdf-b800-2d4263f9aedf","Type":"ContainerDied","Data":"36e74777225a08ea4c362b6de6751b600614c7b59ed6e55d102e121da56490a5"} Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.856067 4868 generic.go:334] "Generic (PLEG): container finished" podID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerID="3f1a89735a0e7af92e496d4a81f856bf15e7c9e6b7f2753eddd5b930229582f1" exitCode=0 Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.856129 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerDied","Data":"3f1a89735a0e7af92e496d4a81f856bf15e7c9e6b7f2753eddd5b930229582f1"} Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.882348 4868 generic.go:334] "Generic (PLEG): container finished" podID="ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" containerID="06c7173f06c45cf9dbeceb400762c30c5ed8926f9f8546dcbdb240c743893616" exitCode=0 Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.882400 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vv67p" event={"ID":"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba","Type":"ContainerDied","Data":"06c7173f06c45cf9dbeceb400762c30c5ed8926f9f8546dcbdb240c743893616"} Oct 03 13:12:07 crc kubenswrapper[4868]: I1003 13:12:07.906400 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.149617 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.188229 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.305995 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306293 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306336 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306411 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306485 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306706 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306779 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306821 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rsgqj\" (UniqueName: \"kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj\") pod \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\" (UID: \"7ec71cd0-6a74-4cee-9739-f26b70c9716f\") " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.306939 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs" (OuterVolumeSpecName: "logs") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.307345 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.309191 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.312350 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts" (OuterVolumeSpecName: "scripts") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.313621 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.314348 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj" (OuterVolumeSpecName: "kube-api-access-rsgqj") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "kube-api-access-rsgqj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.357109 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.385815 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.396485 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data" (OuterVolumeSpecName: "config-data") pod "7ec71cd0-6a74-4cee-9739-f26b70c9716f" (UID: "7ec71cd0-6a74-4cee-9739-f26b70c9716f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409326 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409394 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409406 4868 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/7ec71cd0-6a74-4cee-9739-f26b70c9716f-httpd-run\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409415 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409425 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rsgqj\" (UniqueName: \"kubernetes.io/projected/7ec71cd0-6a74-4cee-9739-f26b70c9716f-kube-api-access-rsgqj\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409437 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.409446 4868 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7ec71cd0-6a74-4cee-9739-f26b70c9716f-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.447429 4868 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.511894 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.558670 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08fd5302-f657-4b15-ae77-4bac04d778da" path="/var/lib/kubelet/pods/08fd5302-f657-4b15-ae77-4bac04d778da/volumes" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.559827 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="65fa5327-5088-4f07-9efd-3685ce55a528" path="/var/lib/kubelet/pods/65fa5327-5088-4f07-9efd-3685ce55a528/volumes" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.561474 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feae4f75-0fab-4d6a-ad00-90c66140d20d" path="/var/lib/kubelet/pods/feae4f75-0fab-4d6a-ad00-90c66140d20d/volumes" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.900792 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerStarted","Data":"573401a25d2bc26097783fafc8c3f785e0bdfae4976307180c358578db8da7ed"} Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.906758 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/glance-default-external-api-0" event={"ID":"847afc40-b6d8-4b55-9101-11d808ae4961","Type":"ContainerStarted","Data":"7cf0004518c9f1c0958d4560de5eb112bc35cfb68bb2818bfd006bc1fb11a3e0"} Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.906830 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"847afc40-b6d8-4b55-9101-11d808ae4961","Type":"ContainerStarted","Data":"5e96bf45ac46c2f5061e0c09c5592ec32a849dd3f132b7487b398216668415f7"} Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.911583 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.911705 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"7ec71cd0-6a74-4cee-9739-f26b70c9716f","Type":"ContainerDied","Data":"a7ebd8f1b62413c8c274b0b626189fe08682431421a95beb63ab5659795515e1"} Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.911924 4868 scope.go:117] "RemoveContainer" containerID="3f1a89735a0e7af92e496d4a81f856bf15e7c9e6b7f2753eddd5b930229582f1" Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.966337 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:08 crc kubenswrapper[4868]: I1003 13:12:08.992013 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.005143 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:09 crc kubenswrapper[4868]: E1003 13:12:09.005617 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-log" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.005637 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-log" Oct 03 13:12:09 crc kubenswrapper[4868]: E1003 13:12:09.005657 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-httpd" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.005663 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-httpd" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.005874 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-httpd" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.005893 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" containerName="glance-log" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.006909 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.010611 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.011016 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.014708 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.028514 4868 scope.go:117] "RemoveContainer" containerID="8c742643c1b94b2d18ca94d96073fa89187103db0d375ac70a0e04bbdcd76d7b" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131585 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131637 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131699 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131728 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131751 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131818 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131847 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " 
pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.131888 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfml8\" (UniqueName: \"kubernetes.io/projected/ed50e7e6-260d-4ac7-99cf-cf935ec77577-kube-api-access-jfml8\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234481 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234610 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234651 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234689 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234762 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234794 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234838 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfml8\" (UniqueName: \"kubernetes.io/projected/ed50e7e6-260d-4ac7-99cf-cf935ec77577-kube-api-access-jfml8\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.234968 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 
13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.235744 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.236294 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-logs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.241444 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ed50e7e6-260d-4ac7-99cf-cf935ec77577-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.245100 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.247914 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.247940 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.259728 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed50e7e6-260d-4ac7-99cf-cf935ec77577-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.270969 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfml8\" (UniqueName: \"kubernetes.io/projected/ed50e7e6-260d-4ac7-99cf-cf935ec77577-kube-api-access-jfml8\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.325796 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ed50e7e6-260d-4ac7-99cf-cf935ec77577\") " pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.345530 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.394813 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.551664 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gms5k\" (UniqueName: \"kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k\") pod \"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba\" (UID: \"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba\") " Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.559266 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k" (OuterVolumeSpecName: "kube-api-access-gms5k") pod "ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" (UID: "ddf7e94d-f6c1-450a-8ef1-8729c1f078ba"). InnerVolumeSpecName "kube-api-access-gms5k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.657897 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gms5k\" (UniqueName: \"kubernetes.io/projected/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba-kube-api-access-gms5k\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.811961 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.835932 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.933124 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-wgb4q" event={"ID":"d96bdf5a-e79e-4746-aa96-32043e17fcb4","Type":"ContainerDied","Data":"ee624042310730b154ef200279fa9dbe552a517a39ccda6907ce1c6f8b697f0c"} Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.933376 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ee624042310730b154ef200279fa9dbe552a517a39ccda6907ce1c6f8b697f0c" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.933158 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-wgb4q" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.935730 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-vv67p" event={"ID":"ddf7e94d-f6c1-450a-8ef1-8729c1f078ba","Type":"ContainerDied","Data":"8895cdd788f6e04874b1b1a88cd3c6b95598ca6b5b126b540a52cdc30fe47b9c"} Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.935793 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8895cdd788f6e04874b1b1a88cd3c6b95598ca6b5b126b540a52cdc30fe47b9c" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.935746 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-vv67p" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.937657 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-2rfdh" event={"ID":"014e693d-4546-4fdf-b800-2d4263f9aedf","Type":"ContainerDied","Data":"d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea"} Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.937685 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d351952f96c13bc4c989480421cc75c57bc5db4fe39bb6e3a77398e8c908d1ea" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.937727 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-2rfdh" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.941131 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerStarted","Data":"84cedba432c2e66e583f58046af47ce7e5662a7cead40e165a55c289cc880a2e"} Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.969879 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25p2w\" (UniqueName: \"kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w\") pod \"d96bdf5a-e79e-4746-aa96-32043e17fcb4\" (UID: \"d96bdf5a-e79e-4746-aa96-32043e17fcb4\") " Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.970008 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8ldj\" (UniqueName: \"kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj\") pod \"014e693d-4546-4fdf-b800-2d4263f9aedf\" (UID: \"014e693d-4546-4fdf-b800-2d4263f9aedf\") " Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.979887 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj" (OuterVolumeSpecName: "kube-api-access-h8ldj") pod "014e693d-4546-4fdf-b800-2d4263f9aedf" (UID: "014e693d-4546-4fdf-b800-2d4263f9aedf"). InnerVolumeSpecName "kube-api-access-h8ldj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:09 crc kubenswrapper[4868]: I1003 13:12:09.980824 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w" (OuterVolumeSpecName: "kube-api-access-25p2w") pod "d96bdf5a-e79e-4746-aa96-32043e17fcb4" (UID: "d96bdf5a-e79e-4746-aa96-32043e17fcb4"). InnerVolumeSpecName "kube-api-access-25p2w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.072151 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25p2w\" (UniqueName: \"kubernetes.io/projected/d96bdf5a-e79e-4746-aa96-32043e17fcb4-kube-api-access-25p2w\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.072196 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8ldj\" (UniqueName: \"kubernetes.io/projected/014e693d-4546-4fdf-b800-2d4263f9aedf-kube-api-access-h8ldj\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:10 crc kubenswrapper[4868]: W1003 13:12:10.191871 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded50e7e6_260d_4ac7_99cf_cf935ec77577.slice/crio-136ce4dede503912fa4365e1d8d8004cbb4e35e9b9360c69d86b6ce70fd5d5cb WatchSource:0}: Error finding container 136ce4dede503912fa4365e1d8d8004cbb4e35e9b9360c69d86b6ce70fd5d5cb: Status 404 returned error can't find the container with id 136ce4dede503912fa4365e1d8d8004cbb4e35e9b9360c69d86b6ce70fd5d5cb Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.196413 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.567215 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7ec71cd0-6a74-4cee-9739-f26b70c9716f" path="/var/lib/kubelet/pods/7ec71cd0-6a74-4cee-9739-f26b70c9716f/volumes" Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.958830 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerStarted","Data":"d8d95b978decdc1b6d22ac0c512fe4488fb7d9788b4aa34f6a160b09d48829a5"} Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.959376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerStarted","Data":"3d44132a504a043606ee71b4269ea98fbd7bc828cc9cdcf80cbe91c64540c974"} Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.963819 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed50e7e6-260d-4ac7-99cf-cf935ec77577","Type":"ContainerStarted","Data":"136ce4dede503912fa4365e1d8d8004cbb4e35e9b9360c69d86b6ce70fd5d5cb"} Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.968260 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"847afc40-b6d8-4b55-9101-11d808ae4961","Type":"ContainerStarted","Data":"a298950128290ddbe942c95fa9ad896cdaec086ea410e04e04e9c2d7f455f2f6"} Oct 03 13:12:10 crc kubenswrapper[4868]: I1003 13:12:10.990208 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.990186302 podStartE2EDuration="4.990186302s" podCreationTimestamp="2025-10-03 13:12:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:12:10.989157855 +0000 UTC m=+1327.199006931" watchObservedRunningTime="2025-10-03 13:12:10.990186302 +0000 UTC m=+1327.200035368" Oct 03 13:12:11 crc kubenswrapper[4868]: I1003 13:12:11.981256 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"ed50e7e6-260d-4ac7-99cf-cf935ec77577","Type":"ContainerStarted","Data":"18837105aa4bee3b1eb8c40e23503b155e978f267c475204a267710800de0266"} Oct 03 13:12:11 crc kubenswrapper[4868]: I1003 13:12:11.981880 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ed50e7e6-260d-4ac7-99cf-cf935ec77577","Type":"ContainerStarted","Data":"025f9d6c58d5cd63243abeb6a874861790876ea15c649000007e8b9dd0f61bfd"} Oct 03 13:12:12 crc kubenswrapper[4868]: I1003 13:12:12.013791 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.01376407 podStartE2EDuration="4.01376407s" podCreationTimestamp="2025-10-03 13:12:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:12:12.010452871 +0000 UTC m=+1328.220301947" watchObservedRunningTime="2025-10-03 13:12:12.01376407 +0000 UTC m=+1328.223613136" Oct 03 13:12:12 crc kubenswrapper[4868]: I1003 13:12:12.436897 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:12 crc kubenswrapper[4868]: I1003 13:12:12.925969 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.012529 4868 generic.go:334] "Generic (PLEG): container finished" podID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerID="6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9" exitCode=137 Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.012602 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerDied","Data":"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9"} Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.014255 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-545874b5c8-jnl6d" event={"ID":"a0820dcb-cd35-41c2-8977-7d999feab9b2","Type":"ContainerDied","Data":"c512263a0aa15c65bbe4ccf2b608c270a37c26f6068b89d690a60f079f1cc65a"} Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.014348 4868 scope.go:117] "RemoveContainer" containerID="88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.012632 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-545874b5c8-jnl6d" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.019620 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerStarted","Data":"800da94e35e2afd56472e53f231865f9e1fd639c9e44d7a684f5c583d3a882bb"} Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.019769 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-central-agent" containerID="cri-o://84cedba432c2e66e583f58046af47ce7e5662a7cead40e165a55c289cc880a2e" gracePeriod=30 Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.019885 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-notification-agent" containerID="cri-o://3d44132a504a043606ee71b4269ea98fbd7bc828cc9cdcf80cbe91c64540c974" gracePeriod=30 Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.019880 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="sg-core" containerID="cri-o://d8d95b978decdc1b6d22ac0c512fe4488fb7d9788b4aa34f6a160b09d48829a5" gracePeriod=30 Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.020075 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="proxy-httpd" containerID="cri-o://800da94e35e2afd56472e53f231865f9e1fd639c9e44d7a684f5c583d3a882bb" gracePeriod=30 Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.021441 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.045360 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c58j5\" (UniqueName: \"kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.045580 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.045689 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.045797 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.045984 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.046257 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.046300 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs" (OuterVolumeSpecName: "logs") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.046404 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs\") pod \"a0820dcb-cd35-41c2-8977-7d999feab9b2\" (UID: \"a0820dcb-cd35-41c2-8977-7d999feab9b2\") " Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.053116 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5" (OuterVolumeSpecName: "kube-api-access-c58j5") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "kube-api-access-c58j5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.054382 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c58j5\" (UniqueName: \"kubernetes.io/projected/a0820dcb-cd35-41c2-8977-7d999feab9b2-kube-api-access-c58j5\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.054533 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a0820dcb-cd35-41c2-8977-7d999feab9b2-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.063085 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.063694 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.64134032 podStartE2EDuration="7.063675373s" podCreationTimestamp="2025-10-03 13:12:06 +0000 UTC" firstStartedPulling="2025-10-03 13:12:07.914885897 +0000 UTC m=+1324.124734963" lastFinishedPulling="2025-10-03 13:12:12.33722094 +0000 UTC m=+1328.547070016" observedRunningTime="2025-10-03 13:12:13.052645407 +0000 UTC m=+1329.262494483" watchObservedRunningTime="2025-10-03 13:12:13.063675373 +0000 UTC m=+1329.273524439" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.085160 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.095697 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data" (OuterVolumeSpecName: "config-data") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.096260 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts" (OuterVolumeSpecName: "scripts") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.157206 4868 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.157244 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.157256 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.157264 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a0820dcb-cd35-41c2-8977-7d999feab9b2-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.167212 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "a0820dcb-cd35-41c2-8977-7d999feab9b2" (UID: "a0820dcb-cd35-41c2-8977-7d999feab9b2"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.237302 4868 scope.go:117] "RemoveContainer" containerID="6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.258819 4868 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a0820dcb-cd35-41c2-8977-7d999feab9b2-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.260532 4868 scope.go:117] "RemoveContainer" containerID="88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab" Oct 03 13:12:13 crc kubenswrapper[4868]: E1003 13:12:13.261019 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab\": container with ID starting with 88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab not found: ID does not exist" containerID="88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.261158 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab"} err="failed to get container status \"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab\": rpc error: code = NotFound desc = could not find container \"88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab\": container with ID starting with 88f04071e2e91a0a4a3da9af023fefe0b657dd34d2721a2ab43b91cd677730ab not found: ID does not exist" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.261252 4868 scope.go:117] "RemoveContainer" containerID="6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9" Oct 03 13:12:13 crc kubenswrapper[4868]: E1003 13:12:13.261875 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9\": container with ID starting with 6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9 not found: ID does not exist" containerID="6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.261918 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9"} err="failed to get container status \"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9\": rpc error: code = NotFound desc = could not find container \"6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9\": container with ID starting with 6e2874a9af3a6e97d74530835d6a31be64a8da356ac7c22913966aa5fd899cd9 not found: ID does not exist" Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.369574 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"] Oct 03 13:12:13 crc kubenswrapper[4868]: I1003 13:12:13.377604 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-545874b5c8-jnl6d"] Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.033867 4868 generic.go:334] "Generic (PLEG): container finished" podID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerID="800da94e35e2afd56472e53f231865f9e1fd639c9e44d7a684f5c583d3a882bb" exitCode=0 
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.034300 4868 generic.go:334] "Generic (PLEG): container finished" podID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerID="d8d95b978decdc1b6d22ac0c512fe4488fb7d9788b4aa34f6a160b09d48829a5" exitCode=2
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.034163 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerDied","Data":"800da94e35e2afd56472e53f231865f9e1fd639c9e44d7a684f5c583d3a882bb"}
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.034392 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerDied","Data":"d8d95b978decdc1b6d22ac0c512fe4488fb7d9788b4aa34f6a160b09d48829a5"}
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.034421 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerDied","Data":"3d44132a504a043606ee71b4269ea98fbd7bc828cc9cdcf80cbe91c64540c974"}
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.034316 4868 generic.go:334] "Generic (PLEG): container finished" podID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerID="3d44132a504a043606ee71b4269ea98fbd7bc828cc9cdcf80cbe91c64540c974" exitCode=0
Oct 03 13:12:14 crc kubenswrapper[4868]: I1003 13:12:14.554867 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" path="/var/lib/kubelet/pods/a0820dcb-cd35-41c2-8977-7d999feab9b2/volumes"
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.063863 4868 generic.go:334] "Generic (PLEG): container finished" podID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerID="84cedba432c2e66e583f58046af47ce7e5662a7cead40e165a55c289cc880a2e" exitCode=0
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.063946 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerDied","Data":"84cedba432c2e66e583f58046af47ce7e5662a7cead40e165a55c289cc880a2e"}
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.064231 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"e192c06a-73b7-44d7-afdb-002c1c96db4e","Type":"ContainerDied","Data":"573401a25d2bc26097783fafc8c3f785e0bdfae4976307180c358578db8da7ed"}
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.064249 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="573401a25d2bc26097783fafc8c3f785e0bdfae4976307180c358578db8da7ed"
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.133584 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248202 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248315 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248350 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248422 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248458 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248512 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b47vf\" (UniqueName: \"kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248561 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd\") pod \"e192c06a-73b7-44d7-afdb-002c1c96db4e\" (UID: \"e192c06a-73b7-44d7-afdb-002c1c96db4e\") "
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248790 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.248913 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.249099 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.254579 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts" (OuterVolumeSpecName: "scripts") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.260231 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf" (OuterVolumeSpecName: "kube-api-access-b47vf") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "kube-api-access-b47vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.283462 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.332491 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.351119 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.351165 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.351178 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b47vf\" (UniqueName: \"kubernetes.io/projected/e192c06a-73b7-44d7-afdb-002c1c96db4e-kube-api-access-b47vf\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.351194 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/e192c06a-73b7-44d7-afdb-002c1c96db4e-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.351205 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.357187 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.357263 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.375600 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data" (OuterVolumeSpecName: "config-data") pod "e192c06a-73b7-44d7-afdb-002c1c96db4e" (UID: "e192c06a-73b7-44d7-afdb-002c1c96db4e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.399043 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.406849 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Oct 03 13:12:17 crc kubenswrapper[4868]: I1003 13:12:17.453719 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e192c06a-73b7-44d7-afdb-002c1c96db4e-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.073276 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.073710 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.073911 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.114569 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.125720 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.145163 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.145989 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146010 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146024 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-notification-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146031 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-notification-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146079 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="014e693d-4546-4fdf-b800-2d4263f9aedf" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146087 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="014e693d-4546-4fdf-b800-2d4263f9aedf" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146096 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="sg-core" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146102 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="sg-core" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146121 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146127 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146142 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon-log" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146148 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon-log" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146170 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="proxy-httpd" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146175 4868 state_mem.go:107] "Deleted 
CPUSet assignment" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="proxy-httpd" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146191 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-central-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146197 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-central-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: E1003 13:12:18.146211 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d96bdf5a-e79e-4746-aa96-32043e17fcb4" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146217 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="d96bdf5a-e79e-4746-aa96-32043e17fcb4" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146410 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-central-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146453 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="d96bdf5a-e79e-4746-aa96-32043e17fcb4" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146466 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146476 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="014e693d-4546-4fdf-b800-2d4263f9aedf" containerName="mariadb-database-create" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146488 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146495 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="ceilometer-notification-agent" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146515 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="sg-core" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146530 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" containerName="proxy-httpd" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.146541 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0820dcb-cd35-41c2-8977-7d999feab9b2" containerName="horizon-log" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.148572 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.151744 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.151981 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.154379 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.269897 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.270187 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.270843 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.270898 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.270990 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.271071 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.271266 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh8rk\" (UniqueName: \"kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373547 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373612 4868 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373638 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373677 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373705 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373743 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh8rk\" (UniqueName: \"kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.373832 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.374408 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.375318 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.378846 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.379328 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.380208 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.383180 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.401843 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh8rk\" (UniqueName: \"kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk\") pod \"ceilometer-0\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.476162 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.561554 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e192c06a-73b7-44d7-afdb-002c1c96db4e" path="/var/lib/kubelet/pods/e192c06a-73b7-44d7-afdb-002c1c96db4e/volumes" Oct 03 13:12:18 crc kubenswrapper[4868]: W1003 13:12:18.944261 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda8ca204c_f593_4157_98b7_ab953f2e7442.slice/crio-0413c16ea51b11d79941c176733c777bfb0dee335b9051a518c645b5f2dec9f0 WatchSource:0}: Error finding container 0413c16ea51b11d79941c176733c777bfb0dee335b9051a518c645b5f2dec9f0: Status 404 returned error can't find the container with id 0413c16ea51b11d79941c176733c777bfb0dee335b9051a518c645b5f2dec9f0 Oct 03 13:12:18 crc kubenswrapper[4868]: I1003 13:12:18.946720 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.086487 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.087672 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerStarted","Data":"0413c16ea51b11d79941c176733c777bfb0dee335b9051a518c645b5f2dec9f0"} Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.171980 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-93b9-account-create-x6xqg"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.174340 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.182180 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.189012 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-93b9-account-create-x6xqg"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.296820 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rqfhs\" (UniqueName: \"kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs\") pod \"nova-api-93b9-account-create-x6xqg\" (UID: \"9a21c034-e84f-4405-ac68-03ddba17adc0\") " pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.346241 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.346296 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.371113 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-e702-account-create-6sf9b"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.372728 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.377904 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.381905 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e702-account-create-6sf9b"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.398987 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rqfhs\" (UniqueName: \"kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs\") pod \"nova-api-93b9-account-create-x6xqg\" (UID: \"9a21c034-e84f-4405-ac68-03ddba17adc0\") " pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.411966 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.424160 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rqfhs\" (UniqueName: \"kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs\") pod \"nova-api-93b9-account-create-x6xqg\" (UID: \"9a21c034-e84f-4405-ac68-03ddba17adc0\") " pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.438372 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.504144 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrf9b\" (UniqueName: \"kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b\") pod \"nova-cell0-e702-account-create-6sf9b\" (UID: \"2ab9aaef-7f57-4e05-a309-808b35cf821b\") " pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:19 
crc kubenswrapper[4868]: I1003 13:12:19.505043 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.555863 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-418d-account-create-hrxpx"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.557487 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.560238 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.573245 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-418d-account-create-hrxpx"] Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.619348 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrf9b\" (UniqueName: \"kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b\") pod \"nova-cell0-e702-account-create-6sf9b\" (UID: \"2ab9aaef-7f57-4e05-a309-808b35cf821b\") " pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.647354 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrf9b\" (UniqueName: \"kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b\") pod \"nova-cell0-e702-account-create-6sf9b\" (UID: \"2ab9aaef-7f57-4e05-a309-808b35cf821b\") " pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.705091 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.723133 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm5ms\" (UniqueName: \"kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms\") pod \"nova-cell1-418d-account-create-hrxpx\" (UID: \"dd9bdd67-1db6-4099-b277-c751798a6d1b\") " pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.825607 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm5ms\" (UniqueName: \"kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms\") pod \"nova-cell1-418d-account-create-hrxpx\" (UID: \"dd9bdd67-1db6-4099-b277-c751798a6d1b\") " pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.843510 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm5ms\" (UniqueName: \"kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms\") pod \"nova-cell1-418d-account-create-hrxpx\" (UID: \"dd9bdd67-1db6-4099-b277-c751798a6d1b\") " pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:19 crc kubenswrapper[4868]: I1003 13:12:19.968142 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:20 crc kubenswrapper[4868]: W1003 13:12:20.019749 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9a21c034_e84f_4405_ac68_03ddba17adc0.slice/crio-8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245 WatchSource:0}: Error finding container 8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245: Status 404 returned error can't find the container with id 8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245 Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.023704 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-93b9-account-create-x6xqg"] Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.124789 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerStarted","Data":"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c"} Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.132136 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-93b9-account-create-x6xqg" event={"ID":"9a21c034-e84f-4405-ac68-03ddba17adc0","Type":"ContainerStarted","Data":"8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245"} Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.132179 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.132191 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.262078 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-e702-account-create-6sf9b"] Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.461953 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.462658 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.505681 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-418d-account-create-hrxpx"] Oct 03 13:12:20 crc kubenswrapper[4868]: I1003 13:12:20.609220 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.146155 4868 generic.go:334] "Generic (PLEG): container finished" podID="9a21c034-e84f-4405-ac68-03ddba17adc0" containerID="67e89954d0d2edc51b22d8ccedd8361ae125c5ecb1f2fbf1be891f048c8a8abd" exitCode=0 Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.146236 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-93b9-account-create-x6xqg" event={"ID":"9a21c034-e84f-4405-ac68-03ddba17adc0","Type":"ContainerDied","Data":"67e89954d0d2edc51b22d8ccedd8361ae125c5ecb1f2fbf1be891f048c8a8abd"} Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.147979 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerStarted","Data":"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df"} Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.149431 
4868 generic.go:334] "Generic (PLEG): container finished" podID="dd9bdd67-1db6-4099-b277-c751798a6d1b" containerID="d33c69860a250dd64929097216750813df2f25f502ae28b5d02c2f11dd33c1b3" exitCode=0 Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.149501 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-418d-account-create-hrxpx" event={"ID":"dd9bdd67-1db6-4099-b277-c751798a6d1b","Type":"ContainerDied","Data":"d33c69860a250dd64929097216750813df2f25f502ae28b5d02c2f11dd33c1b3"} Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.149522 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-418d-account-create-hrxpx" event={"ID":"dd9bdd67-1db6-4099-b277-c751798a6d1b","Type":"ContainerStarted","Data":"8dd465568df54cc765b25234a0c0f3d717ee1b7d51941e56346d4f3fd3e01662"} Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.152920 4868 generic.go:334] "Generic (PLEG): container finished" podID="2ab9aaef-7f57-4e05-a309-808b35cf821b" containerID="a84de38853047c2c0293b71c0de505d17e471f9c2b65068b0daad3f50e291fce" exitCode=0 Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.154231 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e702-account-create-6sf9b" event={"ID":"2ab9aaef-7f57-4e05-a309-808b35cf821b","Type":"ContainerDied","Data":"a84de38853047c2c0293b71c0de505d17e471f9c2b65068b0daad3f50e291fce"} Oct 03 13:12:21 crc kubenswrapper[4868]: I1003 13:12:21.154267 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e702-account-create-6sf9b" event={"ID":"2ab9aaef-7f57-4e05-a309-808b35cf821b","Type":"ContainerStarted","Data":"5145e9a659bce0e9f5fc05bead05d6df9d22796e31a02e028d32108957fddcaa"} Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.103789 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-cfd6858f-9jn5h" Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.166630 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerStarted","Data":"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280"} Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.166718 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.166739 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.205664 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.205933 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-56b4dd546d-pngbh" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-api" containerID="cri-o://7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d" gracePeriod=30 Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.206447 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-56b4dd546d-pngbh" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-httpd" containerID="cri-o://5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97" gracePeriod=30 Oct 03 13:12:22 crc kubenswrapper[4868]: I1003 13:12:22.981361 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.083708 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.139262 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm5ms\" (UniqueName: \"kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms\") pod \"dd9bdd67-1db6-4099-b277-c751798a6d1b\" (UID: \"dd9bdd67-1db6-4099-b277-c751798a6d1b\") " Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.156400 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms" (OuterVolumeSpecName: "kube-api-access-lm5ms") pod "dd9bdd67-1db6-4099-b277-c751798a6d1b" (UID: "dd9bdd67-1db6-4099-b277-c751798a6d1b"). InnerVolumeSpecName "kube-api-access-lm5ms". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.192415 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-e702-account-create-6sf9b" event={"ID":"2ab9aaef-7f57-4e05-a309-808b35cf821b","Type":"ContainerDied","Data":"5145e9a659bce0e9f5fc05bead05d6df9d22796e31a02e028d32108957fddcaa"} Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.192474 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5145e9a659bce0e9f5fc05bead05d6df9d22796e31a02e028d32108957fddcaa" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.196822 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-418d-account-create-hrxpx" event={"ID":"dd9bdd67-1db6-4099-b277-c751798a6d1b","Type":"ContainerDied","Data":"8dd465568df54cc765b25234a0c0f3d717ee1b7d51941e56346d4f3fd3e01662"} Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.196850 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8dd465568df54cc765b25234a0c0f3d717ee1b7d51941e56346d4f3fd3e01662" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.196920 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-418d-account-create-hrxpx" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.200042 4868 generic.go:334] "Generic (PLEG): container finished" podID="39238e33-e64b-43b1-bb17-b81edeb45567" containerID="5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97" exitCode=0 Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.200190 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerDied","Data":"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97"} Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.200223 4868 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.245498 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm5ms\" (UniqueName: \"kubernetes.io/projected/dd9bdd67-1db6-4099-b277-c751798a6d1b-kube-api-access-lm5ms\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.263691 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.271444 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.314853 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.351432 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrf9b\" (UniqueName: \"kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b\") pod \"2ab9aaef-7f57-4e05-a309-808b35cf821b\" (UID: \"2ab9aaef-7f57-4e05-a309-808b35cf821b\") " Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.375318 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b" (OuterVolumeSpecName: "kube-api-access-lrf9b") pod "2ab9aaef-7f57-4e05-a309-808b35cf821b" (UID: "2ab9aaef-7f57-4e05-a309-808b35cf821b"). InnerVolumeSpecName "kube-api-access-lrf9b". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.455174 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rqfhs\" (UniqueName: \"kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs\") pod \"9a21c034-e84f-4405-ac68-03ddba17adc0\" (UID: \"9a21c034-e84f-4405-ac68-03ddba17adc0\") " Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.455651 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrf9b\" (UniqueName: \"kubernetes.io/projected/2ab9aaef-7f57-4e05-a309-808b35cf821b-kube-api-access-lrf9b\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.463540 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs" (OuterVolumeSpecName: "kube-api-access-rqfhs") pod "9a21c034-e84f-4405-ac68-03ddba17adc0" (UID: "9a21c034-e84f-4405-ac68-03ddba17adc0"). InnerVolumeSpecName "kube-api-access-rqfhs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:23 crc kubenswrapper[4868]: I1003 13:12:23.557608 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rqfhs\" (UniqueName: \"kubernetes.io/projected/9a21c034-e84f-4405-ac68-03ddba17adc0-kube-api-access-rqfhs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.213801 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerStarted","Data":"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370"} Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.214324 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.215841 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-93b9-account-create-x6xqg" event={"ID":"9a21c034-e84f-4405-ac68-03ddba17adc0","Type":"ContainerDied","Data":"8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245"} Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.215870 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-e702-account-create-6sf9b" Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.215888 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8f1282c75052f424df694e0241ab9b303fcb573c57d1ab4f4b6a814fc4279245" Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.215915 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-93b9-account-create-x6xqg" Oct 03 13:12:24 crc kubenswrapper[4868]: I1003 13:12:24.348427 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.14567531 podStartE2EDuration="6.348396676s" podCreationTimestamp="2025-10-03 13:12:18 +0000 UTC" firstStartedPulling="2025-10-03 13:12:18.946561117 +0000 UTC m=+1335.156410183" lastFinishedPulling="2025-10-03 13:12:23.149282483 +0000 UTC m=+1339.359131549" observedRunningTime="2025-10-03 13:12:24.259363059 +0000 UTC m=+1340.469212145" watchObservedRunningTime="2025-10-03 13:12:24.348396676 +0000 UTC m=+1340.558245742" Oct 03 13:12:24 crc kubenswrapper[4868]: E1003 13:12:24.587211 4868 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod39238e33_e64b_43b1_bb17_b81edeb45567.slice/crio-7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d.scope\": RecentStats: unable to find data in memory cache]" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.122558 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.194896 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config\") pod \"39238e33-e64b-43b1-bb17-b81edeb45567\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.195275 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config\") pod \"39238e33-e64b-43b1-bb17-b81edeb45567\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.195691 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m67v\" (UniqueName: \"kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v\") pod \"39238e33-e64b-43b1-bb17-b81edeb45567\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.202514 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle\") pod \"39238e33-e64b-43b1-bb17-b81edeb45567\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.203415 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs\") pod \"39238e33-e64b-43b1-bb17-b81edeb45567\" (UID: \"39238e33-e64b-43b1-bb17-b81edeb45567\") " Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.219128 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v" (OuterVolumeSpecName: "kube-api-access-6m67v") pod "39238e33-e64b-43b1-bb17-b81edeb45567" (UID: "39238e33-e64b-43b1-bb17-b81edeb45567"). InnerVolumeSpecName "kube-api-access-6m67v". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.221150 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "39238e33-e64b-43b1-bb17-b81edeb45567" (UID: "39238e33-e64b-43b1-bb17-b81edeb45567"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.234733 4868 generic.go:334] "Generic (PLEG): container finished" podID="39238e33-e64b-43b1-bb17-b81edeb45567" containerID="7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d" exitCode=0 Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.235008 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerDied","Data":"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d"} Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.235096 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-56b4dd546d-pngbh" event={"ID":"39238e33-e64b-43b1-bb17-b81edeb45567","Type":"ContainerDied","Data":"260c10a1ae01d0fef1eea44fee5a4a441f978f3b1b8bd3af78adb25e78e2cfb8"} Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.235120 4868 scope.go:117] "RemoveContainer" containerID="5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.235363 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-56b4dd546d-pngbh" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.282715 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config" (OuterVolumeSpecName: "config") pod "39238e33-e64b-43b1-bb17-b81edeb45567" (UID: "39238e33-e64b-43b1-bb17-b81edeb45567"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.285346 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "39238e33-e64b-43b1-bb17-b81edeb45567" (UID: "39238e33-e64b-43b1-bb17-b81edeb45567"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.311097 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.312891 4868 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-httpd-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.312915 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m67v\" (UniqueName: \"kubernetes.io/projected/39238e33-e64b-43b1-bb17-b81edeb45567-kube-api-access-6m67v\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.312930 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.324961 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "39238e33-e64b-43b1-bb17-b81edeb45567" (UID: "39238e33-e64b-43b1-bb17-b81edeb45567"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.363017 4868 scope.go:117] "RemoveContainer" containerID="7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.393890 4868 scope.go:117] "RemoveContainer" containerID="5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97" Oct 03 13:12:25 crc kubenswrapper[4868]: E1003 13:12:25.394634 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97\": container with ID starting with 5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97 not found: ID does not exist" containerID="5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.394692 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97"} err="failed to get container status \"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97\": rpc error: code = NotFound desc = could not find container \"5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97\": container with ID starting with 5c870148f91f5331bddba6c6adf8b063e70ba2242ff57f2759d129a50e2a0d97 not found: ID does not exist" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.394723 4868 scope.go:117] "RemoveContainer" containerID="7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d" Oct 03 13:12:25 crc kubenswrapper[4868]: E1003 13:12:25.395400 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d\": container with ID starting with 7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d not found: ID does not exist" 
containerID="7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.395431 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d"} err="failed to get container status \"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d\": rpc error: code = NotFound desc = could not find container \"7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d\": container with ID starting with 7614e0b90798a5d65ef92b904c967b8678b1286c2b5e2c247ecd7c9a43ab645d not found: ID does not exist" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.415526 4868 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/39238e33-e64b-43b1-bb17-b81edeb45567-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.570242 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:12:25 crc kubenswrapper[4868]: I1003 13:12:25.582300 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-56b4dd546d-pngbh"] Oct 03 13:12:26 crc kubenswrapper[4868]: I1003 13:12:26.558273 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" path="/var/lib/kubelet/pods/39238e33-e64b-43b1-bb17-b81edeb45567/volumes" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.987700 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fwnkn"] Oct 03 13:12:29 crc kubenswrapper[4868]: E1003 13:12:29.988650 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a21c034-e84f-4405-ac68-03ddba17adc0" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.988671 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a21c034-e84f-4405-ac68-03ddba17adc0" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: E1003 13:12:29.988684 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-api" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.988693 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-api" Oct 03 13:12:29 crc kubenswrapper[4868]: E1003 13:12:29.988738 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2ab9aaef-7f57-4e05-a309-808b35cf821b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.988747 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="2ab9aaef-7f57-4e05-a309-808b35cf821b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: E1003 13:12:29.988768 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd9bdd67-1db6-4099-b277-c751798a6d1b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.988777 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd9bdd67-1db6-4099-b277-c751798a6d1b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: E1003 13:12:29.988794 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-httpd" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 
13:12:29.988802 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-httpd" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.989077 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-httpd" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.989097 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="39238e33-e64b-43b1-bb17-b81edeb45567" containerName="neutron-api" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.989119 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a21c034-e84f-4405-ac68-03ddba17adc0" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.989139 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd9bdd67-1db6-4099-b277-c751798a6d1b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.989153 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="2ab9aaef-7f57-4e05-a309-808b35cf821b" containerName="mariadb-account-create" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.990330 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.993180 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.993368 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cpwt7" Oct 03 13:12:29 crc kubenswrapper[4868]: I1003 13:12:29.993785 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.004035 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fwnkn"] Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.127938 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.128020 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.128141 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtkrt\" (UniqueName: \"kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.128173 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.230416 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.230556 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.230620 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtkrt\" (UniqueName: \"kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.230663 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.239649 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.240329 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.241375 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.253389 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtkrt\" (UniqueName: \"kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt\") pod \"nova-cell0-conductor-db-sync-fwnkn\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.332394 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.844992 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.845672 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-central-agent" containerID="cri-o://029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" gracePeriod=30 Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.846294 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="proxy-httpd" containerID="cri-o://b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" gracePeriod=30 Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.846360 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="sg-core" containerID="cri-o://94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" gracePeriod=30 Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.846403 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-notification-agent" containerID="cri-o://e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" gracePeriod=30 Oct 03 13:12:30 crc kubenswrapper[4868]: I1003 13:12:30.859340 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fwnkn"] Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.303169 4868 generic.go:334] "Generic (PLEG): container finished" podID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerID="b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" exitCode=0 Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.303486 4868 generic.go:334] "Generic (PLEG): container finished" podID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerID="94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" exitCode=2 Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.303258 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerDied","Data":"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370"} Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.303567 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerDied","Data":"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280"} Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.305982 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" event={"ID":"476384d7-320b-4e69-81ea-b5193f3c944a","Type":"ContainerStarted","Data":"36fb2f0899c6774d604f981a9e426e1b24145ebcb957a6df37964ae35e9676c2"} Oct 03 13:12:31 crc kubenswrapper[4868]: I1003 13:12:31.945041 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068428 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh8rk\" (UniqueName: \"kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068493 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068595 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068644 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068702 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068770 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.068788 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data\") pod \"a8ca204c-f593-4157-98b7-ab953f2e7442\" (UID: \"a8ca204c-f593-4157-98b7-ab953f2e7442\") " Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.069362 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.070083 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.076704 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts" (OuterVolumeSpecName: "scripts") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.076831 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk" (OuterVolumeSpecName: "kube-api-access-lh8rk") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "kube-api-access-lh8rk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.104356 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.146469 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.146543 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.171693 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh8rk\" (UniqueName: \"kubernetes.io/projected/a8ca204c-f593-4157-98b7-ab953f2e7442-kube-api-access-lh8rk\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.171729 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.171739 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.171748 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a8ca204c-f593-4157-98b7-ab953f2e7442-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.171756 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.176824 4868 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.200359 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data" (OuterVolumeSpecName: "config-data") pod "a8ca204c-f593-4157-98b7-ab953f2e7442" (UID: "a8ca204c-f593-4157-98b7-ab953f2e7442"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.274699 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.274793 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8ca204c-f593-4157-98b7-ab953f2e7442-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325015 4868 generic.go:334] "Generic (PLEG): container finished" podID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerID="e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" exitCode=0 Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325071 4868 generic.go:334] "Generic (PLEG): container finished" podID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerID="029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" exitCode=0 Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325096 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerDied","Data":"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df"} Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325128 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerDied","Data":"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c"} Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325159 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a8ca204c-f593-4157-98b7-ab953f2e7442","Type":"ContainerDied","Data":"0413c16ea51b11d79941c176733c777bfb0dee335b9051a518c645b5f2dec9f0"} Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325178 4868 scope.go:117] "RemoveContainer" containerID="b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.325360 4868 util.go:48] "No ready sandbox for pod can be found. 
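[editor's note] The machine-config-daemon liveness failure above ("Get http://127.0.0.1:8798/health: dial tcp ... connection refused") is an HTTP probe whose transport error counts as a failed probe. A minimal sketch of such a probe is below; note the kubelet itself accepts any status from 200 up to 399 as success, while this simplified probeHTTP (a hypothetical name) accepts only 2xx.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeHTTP GETs the endpoint with a short timeout; any transport error
// (e.g. the "connection refused" in the log) or non-2xx status is a failure.
func probeHTTP(url string) (ok bool, detail string) {
	client := &http.Client{Timeout: 1 * time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return false, err.Error() // e.g. connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return false, fmt.Sprintf("unexpected status %d", resp.StatusCode)
	}
	return true, "ok"
}

func main() {
	ok, detail := probeHTTP("http://127.0.0.1:8798/health")
	fmt.Printf("probeResult=%v output=%q\n", ok, detail)
}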
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.367450 4868 scope.go:117] "RemoveContainer" containerID="94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.373582 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.393615 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.406559 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.407091 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="proxy-httpd" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407106 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="proxy-httpd" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.407128 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="sg-core" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407134 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="sg-core" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.407146 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-central-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407154 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-central-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.407168 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-notification-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407175 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-notification-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407346 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="proxy-httpd" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407358 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="sg-core" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407369 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-notification-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.407377 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" containerName="ceilometer-central-agent" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.409112 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.415338 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.415560 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.416660 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.418307 4868 scope.go:117] "RemoveContainer" containerID="e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.471822 4868 scope.go:117] "RemoveContainer" containerID="029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.504064 4868 scope.go:117] "RemoveContainer" containerID="b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.504691 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370\": container with ID starting with b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370 not found: ID does not exist" containerID="b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.504798 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370"} err="failed to get container status \"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370\": rpc error: code = NotFound desc = could not find container \"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370\": container with ID starting with b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370 not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.504846 4868 scope.go:117] "RemoveContainer" containerID="94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.505644 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280\": container with ID starting with 94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280 not found: ID does not exist" containerID="94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.505680 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280"} err="failed to get container status \"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280\": rpc error: code = NotFound desc = could not find container \"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280\": container with ID starting with 94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280 not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.505698 4868 scope.go:117] "RemoveContainer" containerID="e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" Oct 03 
13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.508597 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df\": container with ID starting with e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df not found: ID does not exist" containerID="e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.508657 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df"} err="failed to get container status \"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df\": rpc error: code = NotFound desc = could not find container \"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df\": container with ID starting with e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.508699 4868 scope.go:117] "RemoveContainer" containerID="029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" Oct 03 13:12:32 crc kubenswrapper[4868]: E1003 13:12:32.509276 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c\": container with ID starting with 029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c not found: ID does not exist" containerID="029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509333 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c"} err="failed to get container status \"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c\": rpc error: code = NotFound desc = could not find container \"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c\": container with ID starting with 029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509376 4868 scope.go:117] "RemoveContainer" containerID="b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509716 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370"} err="failed to get container status \"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370\": rpc error: code = NotFound desc = could not find container \"b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370\": container with ID starting with b2f1f44d0423a134a20f35ba08717b64dff1d254f7199d80307f71247fce0370 not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509738 4868 scope.go:117] "RemoveContainer" containerID="94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509940 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280"} err="failed to get container status 
\"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280\": rpc error: code = NotFound desc = could not find container \"94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280\": container with ID starting with 94bfed7ce65175d654a08dbecd1e731eede542d60ba9e00a5d75329125a88280 not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.509966 4868 scope.go:117] "RemoveContainer" containerID="e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.510220 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df"} err="failed to get container status \"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df\": rpc error: code = NotFound desc = could not find container \"e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df\": container with ID starting with e2ef92369e2cb16d3e704c9c8b537de6afbafd7acb9a53ad74253bafde7ec1df not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.510249 4868 scope.go:117] "RemoveContainer" containerID="029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.510921 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c"} err="failed to get container status \"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c\": rpc error: code = NotFound desc = could not find container \"029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c\": container with ID starting with 029a708525f9a1034e4c885061d72af02baa12ea31f69aa557a2e2cd2e1ba37c not found: ID does not exist" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.562411 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a8ca204c-f593-4157-98b7-ab953f2e7442" path="/var/lib/kubelet/pods/a8ca204c-f593-4157-98b7-ab953f2e7442/volumes" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582534 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582588 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582726 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582747 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582780 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582818 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l4lv5\" (UniqueName: \"kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.582837 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.685199 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.685647 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l4lv5\" (UniqueName: \"kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.685677 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.687528 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.688037 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.688200 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.688847 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: 
\"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.689034 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.694961 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.702461 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.702507 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.708180 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.709322 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l4lv5\" (UniqueName: \"kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.709713 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " pod="openstack/ceilometer-0" Oct 03 13:12:32 crc kubenswrapper[4868]: I1003 13:12:32.757329 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:33 crc kubenswrapper[4868]: I1003 13:12:33.258728 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:33 crc kubenswrapper[4868]: W1003 13:12:33.269389 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podea1557fa_6946_4bb3_8d56_0b52c8e90d29.slice/crio-c42420c81a940474e0bec2ce2fb75c4822e03f3c0999ca0272eaea83ba63f614 WatchSource:0}: Error finding container c42420c81a940474e0bec2ce2fb75c4822e03f3c0999ca0272eaea83ba63f614: Status 404 returned error can't find the container with id c42420c81a940474e0bec2ce2fb75c4822e03f3c0999ca0272eaea83ba63f614 Oct 03 13:12:33 crc kubenswrapper[4868]: I1003 13:12:33.346989 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerStarted","Data":"c42420c81a940474e0bec2ce2fb75c4822e03f3c0999ca0272eaea83ba63f614"} Oct 03 13:12:34 crc kubenswrapper[4868]: I1003 13:12:34.375168 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerStarted","Data":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} Oct 03 13:12:40 crc kubenswrapper[4868]: I1003 13:12:40.081570 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:43 crc kubenswrapper[4868]: I1003 13:12:43.505495 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerStarted","Data":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} Oct 03 13:12:43 crc kubenswrapper[4868]: I1003 13:12:43.508727 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" event={"ID":"476384d7-320b-4e69-81ea-b5193f3c944a","Type":"ContainerStarted","Data":"f8643a2cdc5cd948282c56165245bf88d8d3cb4611968c8fa99a836c64542588"} Oct 03 13:12:43 crc kubenswrapper[4868]: I1003 13:12:43.544493 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" podStartSLOduration=3.070840141 podStartE2EDuration="14.544465268s" podCreationTimestamp="2025-10-03 13:12:29 +0000 UTC" firstStartedPulling="2025-10-03 13:12:30.895704051 +0000 UTC m=+1347.105553117" lastFinishedPulling="2025-10-03 13:12:42.369329178 +0000 UTC m=+1358.579178244" observedRunningTime="2025-10-03 13:12:43.527298878 +0000 UTC m=+1359.737147964" watchObservedRunningTime="2025-10-03 13:12:43.544465268 +0000 UTC m=+1359.754314344" Oct 03 13:12:44 crc kubenswrapper[4868]: I1003 13:12:44.523158 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerStarted","Data":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.536254 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerStarted","Data":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.537130 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 
13:12:45.536525 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-central-agent" containerID="cri-o://6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" gracePeriod=30 Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.536610 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-notification-agent" containerID="cri-o://25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" gracePeriod=30 Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.536611 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="sg-core" containerID="cri-o://d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" gracePeriod=30 Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.536525 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="proxy-httpd" containerID="cri-o://f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" gracePeriod=30 Oct 03 13:12:45 crc kubenswrapper[4868]: I1003 13:12:45.576931 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.903006547 podStartE2EDuration="13.57690637s" podCreationTimestamp="2025-10-03 13:12:32 +0000 UTC" firstStartedPulling="2025-10-03 13:12:33.273290903 +0000 UTC m=+1349.483139959" lastFinishedPulling="2025-10-03 13:12:44.947190706 +0000 UTC m=+1361.157039782" observedRunningTime="2025-10-03 13:12:45.563603223 +0000 UTC m=+1361.773452309" watchObservedRunningTime="2025-10-03 13:12:45.57690637 +0000 UTC m=+1361.786755446" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.457086 4868 util.go:48] "No ready sandbox for pod can be found. 
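[editor's note] The pod_startup_latency_tracker entries above report two durations, and the logged values are consistent with podStartSLOduration being the end-to-end startup time minus the image-pull window. Checking the nova-cell0-conductor-db-sync-fwnkn entry: e2e = 13:12:43.544465268 - 13:12:29 = 14.544465268s; pull = 42.369329178 - 30.895704051 = 11.473625127s; 14.544465268 - 11.473625127 = 3.070840141s, exactly the logged SLO duration. The arithmetic, reproduced in Go (to within float rounding):

package main

import "fmt"

func main() {
	// Wall-clock seconds within 13:12:xx, taken from the log entry above.
	created := 29.000000000             // podCreationTimestamp 13:12:29
	observedRunning := 43.544465268     // watchObservedRunningTime
	firstStartedPulling := 30.895704051 // image pull window start
	lastFinishedPulling := 42.369329178 // image pull window end

	e2e := observedRunning - created                  // 14.544465268s
	pull := lastFinishedPulling - firstStartedPulling // 11.473625127s
	slo := e2e - pull                                 // 3.070840141s

	fmt.Printf("podStartE2EDuration = %.9fs\n", e2e)
	fmt.Printf("image pull time     = %.9fs\n", pull)
	fmt.Printf("podStartSLOduration = %.9fs\n", slo)
}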
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570300 4868 generic.go:334] "Generic (PLEG): container finished" podID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" exitCode=0 Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570371 4868 generic.go:334] "Generic (PLEG): container finished" podID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" exitCode=2 Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570381 4868 generic.go:334] "Generic (PLEG): container finished" podID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" exitCode=0 Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570394 4868 generic.go:334] "Generic (PLEG): container finished" podID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" exitCode=0 Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570537 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerDied","Data":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570579 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerDied","Data":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570597 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerDied","Data":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570612 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerDied","Data":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570625 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"ea1557fa-6946-4bb3-8d56-0b52c8e90d29","Type":"ContainerDied","Data":"c42420c81a940474e0bec2ce2fb75c4822e03f3c0999ca0272eaea83ba63f614"} Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570647 4868 scope.go:117] "RemoveContainer" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.570856 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.604221 4868 scope.go:117] "RemoveContainer" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.624735 4868 scope.go:117] "RemoveContainer" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635308 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635463 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l4lv5\" (UniqueName: \"kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635512 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635610 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635700 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.635728 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.636004 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd\") pod \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\" (UID: \"ea1557fa-6946-4bb3-8d56-0b52c8e90d29\") " Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.636267 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.636860 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.637391 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.637411 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.643178 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5" (OuterVolumeSpecName: "kube-api-access-l4lv5") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "kube-api-access-l4lv5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.645238 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts" (OuterVolumeSpecName: "scripts") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.649840 4868 scope.go:117] "RemoveContainer" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.668970 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.702888 4868 scope.go:117] "RemoveContainer" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.703724 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": container with ID starting with f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5 not found: ID does not exist" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.703776 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} err="failed to get container status \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": rpc error: code = NotFound desc = could not find container \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": container with ID starting with f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.703806 4868 scope.go:117] "RemoveContainer" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.704485 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": container with ID starting with d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5 not found: ID does not exist" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.704612 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} err="failed to get container status \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": rpc error: code = NotFound desc = could not find container \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": container with ID starting with d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.704713 4868 scope.go:117] "RemoveContainer" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.705142 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": container with ID starting with 25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64 not found: ID does not exist" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.705219 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} err="failed to get container status \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": rpc error: code = NotFound desc = could not 
find container \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": container with ID starting with 25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.705319 4868 scope.go:117] "RemoveContainer" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.705947 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": container with ID starting with 6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2 not found: ID does not exist" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.706079 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} err="failed to get container status \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": rpc error: code = NotFound desc = could not find container \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": container with ID starting with 6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.706178 4868 scope.go:117] "RemoveContainer" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.706603 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} err="failed to get container status \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": rpc error: code = NotFound desc = could not find container \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": container with ID starting with f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.706659 4868 scope.go:117] "RemoveContainer" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707098 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} err="failed to get container status \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": rpc error: code = NotFound desc = could not find container \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": container with ID starting with d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707128 4868 scope.go:117] "RemoveContainer" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707432 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} err="failed to get container status \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": rpc error: code = NotFound desc = could not 
find container \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": container with ID starting with 25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707529 4868 scope.go:117] "RemoveContainer" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707800 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} err="failed to get container status \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": rpc error: code = NotFound desc = could not find container \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": container with ID starting with 6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.707878 4868 scope.go:117] "RemoveContainer" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.708197 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} err="failed to get container status \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": rpc error: code = NotFound desc = could not find container \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": container with ID starting with f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.708296 4868 scope.go:117] "RemoveContainer" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.708637 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} err="failed to get container status \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": rpc error: code = NotFound desc = could not find container \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": container with ID starting with d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.708721 4868 scope.go:117] "RemoveContainer" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.708982 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} err="failed to get container status \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": rpc error: code = NotFound desc = could not find container \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": container with ID starting with 25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.709071 4868 scope.go:117] "RemoveContainer" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.709564 4868 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} err="failed to get container status \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": rpc error: code = NotFound desc = could not find container \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": container with ID starting with 6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.709643 4868 scope.go:117] "RemoveContainer" containerID="f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.709972 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5"} err="failed to get container status \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": rpc error: code = NotFound desc = could not find container \"f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5\": container with ID starting with f617cf0427d9230e37469822abe6d25e8428987953a6aa0270496466267923e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.710042 4868 scope.go:117] "RemoveContainer" containerID="d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.710380 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5"} err="failed to get container status \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": rpc error: code = NotFound desc = could not find container \"d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5\": container with ID starting with d59397f799824ced3d06da058d2651efae9730be81edd830473c8d0b3b64d6e5 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.710452 4868 scope.go:117] "RemoveContainer" containerID="25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.710808 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64"} err="failed to get container status \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": rpc error: code = NotFound desc = could not find container \"25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64\": container with ID starting with 25da4c034e9bbe577071484acb0261184a79bc6ab08a2f7de24e12ffde034d64 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.710895 4868 scope.go:117] "RemoveContainer" containerID="6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.711216 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2"} err="failed to get container status \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": rpc error: code = NotFound desc = could not find container \"6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2\": container with ID starting with 
6b25ab9d6e6d16cb345be5a0237273965b0dca94dd5917cb63a72ab1b25db9a2 not found: ID does not exist" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.725444 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.740029 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l4lv5\" (UniqueName: \"kubernetes.io/projected/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-kube-api-access-l4lv5\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.740097 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.740108 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.740116 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.755347 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data" (OuterVolumeSpecName: "config-data") pod "ea1557fa-6946-4bb3-8d56-0b52c8e90d29" (UID: "ea1557fa-6946-4bb3-8d56-0b52c8e90d29"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.842028 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ea1557fa-6946-4bb3-8d56-0b52c8e90d29-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.926436 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.943836 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.954510 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.957111 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="proxy-httpd" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.957274 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="proxy-httpd" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.957357 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-central-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.957419 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-central-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.957489 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="sg-core" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.957551 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="sg-core" Oct 03 13:12:46 crc kubenswrapper[4868]: E1003 13:12:46.957626 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-notification-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.957681 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-notification-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.957935 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-central-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.958008 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="proxy-httpd" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.958941 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="sg-core" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.959041 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" containerName="ceilometer-notification-agent" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.961674 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.964569 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.964773 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:12:46 crc kubenswrapper[4868]: I1003 13:12:46.972171 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148204 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148257 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148354 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148463 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148492 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148521 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84kg7\" (UniqueName: \"kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.148806 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250638 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 
13:12:47.250747 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250790 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250858 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250908 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250926 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.250950 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84kg7\" (UniqueName: \"kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.251963 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.252267 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.255890 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.256344 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.256407 4868 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.271782 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84kg7\" (UniqueName: \"kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.272366 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data\") pod \"ceilometer-0\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") " pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.387762 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:12:47 crc kubenswrapper[4868]: I1003 13:12:47.934495 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:12:48 crc kubenswrapper[4868]: I1003 13:12:48.558599 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea1557fa-6946-4bb3-8d56-0b52c8e90d29" path="/var/lib/kubelet/pods/ea1557fa-6946-4bb3-8d56-0b52c8e90d29/volumes" Oct 03 13:12:48 crc kubenswrapper[4868]: I1003 13:12:48.629115 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerStarted","Data":"c0ac971c54b563b0a133d8a978378dff056c2a296c7788c9ffd962c791329fa7"} Oct 03 13:12:50 crc kubenswrapper[4868]: I1003 13:12:50.654254 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerStarted","Data":"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"} Oct 03 13:12:52 crc kubenswrapper[4868]: I1003 13:12:52.678583 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerStarted","Data":"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"} Oct 03 13:12:53 crc kubenswrapper[4868]: I1003 13:12:53.688973 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerStarted","Data":"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"} Oct 03 13:12:54 crc kubenswrapper[4868]: I1003 13:12:54.706227 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerStarted","Data":"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"} Oct 03 13:12:54 crc kubenswrapper[4868]: I1003 13:12:54.706818 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:12:54 crc kubenswrapper[4868]: I1003 13:12:54.742698 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.379982573 podStartE2EDuration="8.742665074s" podCreationTimestamp="2025-10-03 13:12:46 +0000 UTC" firstStartedPulling="2025-10-03 13:12:47.94424353 +0000 UTC m=+1364.154092596" 
lastFinishedPulling="2025-10-03 13:12:54.306926031 +0000 UTC m=+1370.516775097" observedRunningTime="2025-10-03 13:12:54.732509592 +0000 UTC m=+1370.942358678" watchObservedRunningTime="2025-10-03 13:12:54.742665074 +0000 UTC m=+1370.952514150" Oct 03 13:13:01 crc kubenswrapper[4868]: I1003 13:13:01.774754 4868 generic.go:334] "Generic (PLEG): container finished" podID="476384d7-320b-4e69-81ea-b5193f3c944a" containerID="f8643a2cdc5cd948282c56165245bf88d8d3cb4611968c8fa99a836c64542588" exitCode=0 Oct 03 13:13:01 crc kubenswrapper[4868]: I1003 13:13:01.774833 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" event={"ID":"476384d7-320b-4e69-81ea-b5193f3c944a","Type":"ContainerDied","Data":"f8643a2cdc5cd948282c56165245bf88d8d3cb4611968c8fa99a836c64542588"} Oct 03 13:13:02 crc kubenswrapper[4868]: I1003 13:13:02.149981 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:13:02 crc kubenswrapper[4868]: I1003 13:13:02.150075 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.140123 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.180158 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data\") pod \"476384d7-320b-4e69-81ea-b5193f3c944a\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.180308 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts\") pod \"476384d7-320b-4e69-81ea-b5193f3c944a\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.180384 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtkrt\" (UniqueName: \"kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt\") pod \"476384d7-320b-4e69-81ea-b5193f3c944a\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.180419 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle\") pod \"476384d7-320b-4e69-81ea-b5193f3c944a\" (UID: \"476384d7-320b-4e69-81ea-b5193f3c944a\") " Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.188665 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts" (OuterVolumeSpecName: "scripts") pod "476384d7-320b-4e69-81ea-b5193f3c944a" (UID: "476384d7-320b-4e69-81ea-b5193f3c944a"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.189653 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt" (OuterVolumeSpecName: "kube-api-access-jtkrt") pod "476384d7-320b-4e69-81ea-b5193f3c944a" (UID: "476384d7-320b-4e69-81ea-b5193f3c944a"). InnerVolumeSpecName "kube-api-access-jtkrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.212095 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data" (OuterVolumeSpecName: "config-data") pod "476384d7-320b-4e69-81ea-b5193f3c944a" (UID: "476384d7-320b-4e69-81ea-b5193f3c944a"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.212622 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "476384d7-320b-4e69-81ea-b5193f3c944a" (UID: "476384d7-320b-4e69-81ea-b5193f3c944a"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.285743 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.285789 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.285801 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/476384d7-320b-4e69-81ea-b5193f3c944a-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.285813 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtkrt\" (UniqueName: \"kubernetes.io/projected/476384d7-320b-4e69-81ea-b5193f3c944a-kube-api-access-jtkrt\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.794755 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" event={"ID":"476384d7-320b-4e69-81ea-b5193f3c944a","Type":"ContainerDied","Data":"36fb2f0899c6774d604f981a9e426e1b24145ebcb957a6df37964ae35e9676c2"} Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.795295 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="36fb2f0899c6774d604f981a9e426e1b24145ebcb957a6df37964ae35e9676c2" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.795361 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-fwnkn" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.899579 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:13:03 crc kubenswrapper[4868]: E1003 13:13:03.899989 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="476384d7-320b-4e69-81ea-b5193f3c944a" containerName="nova-cell0-conductor-db-sync" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.900026 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="476384d7-320b-4e69-81ea-b5193f3c944a" containerName="nova-cell0-conductor-db-sync" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.900229 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="476384d7-320b-4e69-81ea-b5193f3c944a" containerName="nova-cell0-conductor-db-sync" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.900879 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.904845 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.904947 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-cpwt7" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.917667 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.997287 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-swxvp\" (UniqueName: \"kubernetes.io/projected/9fe73541-1e4f-4caf-9e26-7865eb2908f9-kube-api-access-swxvp\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.997338 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:03 crc kubenswrapper[4868]: I1003 13:13:03.997404 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.099094 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-swxvp\" (UniqueName: \"kubernetes.io/projected/9fe73541-1e4f-4caf-9e26-7865eb2908f9-kube-api-access-swxvp\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.099154 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: 
I1003 13:13:04.099237 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.104167 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.104446 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9fe73541-1e4f-4caf-9e26-7865eb2908f9-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.119952 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-swxvp\" (UniqueName: \"kubernetes.io/projected/9fe73541-1e4f-4caf-9e26-7865eb2908f9-kube-api-access-swxvp\") pod \"nova-cell0-conductor-0\" (UID: \"9fe73541-1e4f-4caf-9e26-7865eb2908f9\") " pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.219023 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.711805 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Oct 03 13:13:04 crc kubenswrapper[4868]: I1003 13:13:04.807458 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9fe73541-1e4f-4caf-9e26-7865eb2908f9","Type":"ContainerStarted","Data":"18a938accf1afa37238d60ad9cd028ad3a769e592554215cc57e75d121e1e309"} Oct 03 13:13:05 crc kubenswrapper[4868]: I1003 13:13:05.823036 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"9fe73541-1e4f-4caf-9e26-7865eb2908f9","Type":"ContainerStarted","Data":"c335e3d91e2de2ee7df1245728eb7f3016c7f8901aa0f10de02ba9e49284ae0a"} Oct 03 13:13:05 crc kubenswrapper[4868]: I1003 13:13:05.823723 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:05 crc kubenswrapper[4868]: I1003 13:13:05.855512 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.855486581 podStartE2EDuration="2.855486581s" podCreationTimestamp="2025-10-03 13:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:05.846734656 +0000 UTC m=+1382.056583742" watchObservedRunningTime="2025-10-03 13:13:05.855486581 +0000 UTC m=+1382.065335647" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.254642 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.712464 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-89vph"] Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.714738 4868 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.718080 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.718546 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.728227 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-89vph"] Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.856846 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.857445 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.857556 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.857603 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.857722 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdcsl\" (UniqueName: \"kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.862082 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.867582 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.889701 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959372 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959438 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959539 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959591 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959668 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4vmjk\" (UniqueName: \"kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959715 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdcsl\" (UniqueName: \"kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.959738 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.973864 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc 
kubenswrapper[4868]: I1003 13:13:14.991178 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:14 crc kubenswrapper[4868]: I1003 13:13:14.994789 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.005159 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.007896 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.015679 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdcsl\" (UniqueName: \"kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl\") pod \"nova-cell0-cell-mapping-89vph\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") " pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.021661 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.022399 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.039665 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.041324 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.051319 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.059287 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-89vph" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.070858 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.070938 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4vmjk\" (UniqueName: \"kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.070983 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.071030 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.071073 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.071092 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbwmm\" (UniqueName: \"kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.071114 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.090063 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.090624 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.130609 4868 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4vmjk\" (UniqueName: \"kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk\") pod \"nova-cell1-novncproxy-0\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.139646 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.183902 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.196638 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.196715 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tc7zk\" (UniqueName: \"kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.196784 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.196949 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.197119 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.197144 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbwmm\" (UniqueName: \"kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.197176 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.199343 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " 
pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.202740 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.213770 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.221441 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.236452 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.243808 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbwmm\" (UniqueName: \"kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm\") pod \"nova-metadata-0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.244223 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.251124 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.268575 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.270142 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.276626 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.284134 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.307765 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.307897 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.307926 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tc7zk\" (UniqueName: \"kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.328205 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.329851 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.332019 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tc7zk\" (UniqueName: \"kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk\") pod \"nova-scheduler-0\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410358 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410749 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410807 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 
13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410842 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410886 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410927 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frdjm\" (UniqueName: \"kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410944 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzgjh\" (UniqueName: \"kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.410979 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.415704 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.415868 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.518944 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frdjm\" (UniqueName: \"kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519005 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzgjh\" (UniqueName: \"kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " 
pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519096 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519209 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519324 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519396 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519444 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519486 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519527 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.519565 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.521784 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.522453 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.522640 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.523828 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.523957 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.525103 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.526680 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.527109 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.550223 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzgjh\" (UniqueName: \"kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh\") pod \"dnsmasq-dns-757b4f8459-xx55n\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.557849 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frdjm\" (UniqueName: \"kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm\") pod \"nova-api-0\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") " pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.607219 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.647422 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.665612 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.833314 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-89vph"] Oct 03 13:13:15 crc kubenswrapper[4868]: W1003 13:13:15.863262 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6be42dc6_1220_4751_8201_dbcf019309ce.slice/crio-aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351 WatchSource:0}: Error finding container aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351: Status 404 returned error can't find the container with id aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351 Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.935088 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7km5p"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.936327 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.943921 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.944332 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.956800 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7km5p"] Oct 03 13:13:15 crc kubenswrapper[4868]: I1003 13:13:15.960846 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-89vph" event={"ID":"6be42dc6-1220-4751-8201-dbcf019309ce","Type":"ContainerStarted","Data":"aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351"} Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.009336 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.019342 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.036169 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.036227 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.036268 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44jg4\" (UniqueName: \"kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4\") pod 
\"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.036357 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.138575 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.138650 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.138705 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44jg4\" (UniqueName: \"kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.138802 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.144008 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.144585 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.145182 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.156305 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44jg4\" (UniqueName: 
\"kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4\") pod \"nova-cell1-conductor-db-sync-7km5p\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") " pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.220852 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7km5p" Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.312797 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.327692 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:16 crc kubenswrapper[4868]: W1003 13:13:16.343463 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod47d81ef6_7fb4_4cd8_b217_2c034980317c.slice/crio-0f420d7e7b4f91763b38fa800aacece7f008001b43e1b27fc2fb9b52af39b200 WatchSource:0}: Error finding container 0f420d7e7b4f91763b38fa800aacece7f008001b43e1b27fc2fb9b52af39b200: Status 404 returned error can't find the container with id 0f420d7e7b4f91763b38fa800aacece7f008001b43e1b27fc2fb9b52af39b200 Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.519192 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:13:16 crc kubenswrapper[4868]: W1003 13:13:16.541534 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod27f11be0_68b7_40ed_8722_96cbcc1119d0.slice/crio-772bb2a6fa15f50e192f51e5ad2697d4f99020783bd28a2546c9b5b0b58459b5 WatchSource:0}: Error finding container 772bb2a6fa15f50e192f51e5ad2697d4f99020783bd28a2546c9b5b0b58459b5: Status 404 returned error can't find the container with id 772bb2a6fa15f50e192f51e5ad2697d4f99020783bd28a2546c9b5b0b58459b5 Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.817961 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7km5p"] Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.982449 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7km5p" event={"ID":"52fc4aac-675f-4824-9a11-e41a71de1c88","Type":"ContainerStarted","Data":"2e0f4e088b48fadeb1facaee6ec4a575940f83e8a9a1cc9f8851abb3ac05edca"} Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.989295 4868 generic.go:334] "Generic (PLEG): container finished" podID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerID="7acd3819d427feb1ae6cb77e2164b79358e9594bf61655151164bfeec8251bfb" exitCode=0 Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.989933 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" event={"ID":"27f11be0-68b7-40ed-8722-96cbcc1119d0","Type":"ContainerDied","Data":"7acd3819d427feb1ae6cb77e2164b79358e9594bf61655151164bfeec8251bfb"} Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.993148 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" event={"ID":"27f11be0-68b7-40ed-8722-96cbcc1119d0","Type":"ContainerStarted","Data":"772bb2a6fa15f50e192f51e5ad2697d4f99020783bd28a2546c9b5b0b58459b5"} Oct 03 13:13:16 crc kubenswrapper[4868]: I1003 13:13:16.996514 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"d91974be-5f48-4772-951b-e4c4ac1eadc4","Type":"ContainerStarted","Data":"52510002b30a5d24cf062b695ee35812a2bc417295307cd4f59e27999ec5350a"} Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.000022 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"06025e3f-67d5-43f8-8a1f-411eb3b835ad","Type":"ContainerStarted","Data":"3ab1d3299a35f742e7fbf64acd055fe00cc8696880d84d5bde04827ea307594d"} Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.027213 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-89vph" event={"ID":"6be42dc6-1220-4751-8201-dbcf019309ce","Type":"ContainerStarted","Data":"697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458"} Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.032106 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerStarted","Data":"9ebb7b99f0c3199c01ec80a99b25c65bda1bde4945e571e7c02aff92b3d9b330"} Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.038541 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerStarted","Data":"0f420d7e7b4f91763b38fa800aacece7f008001b43e1b27fc2fb9b52af39b200"} Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.070579 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-89vph" podStartSLOduration=3.07054539 podStartE2EDuration="3.07054539s" podCreationTimestamp="2025-10-03 13:13:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:17.052414524 +0000 UTC m=+1393.262263600" watchObservedRunningTime="2025-10-03 13:13:17.07054539 +0000 UTC m=+1393.280394446" Oct 03 13:13:17 crc kubenswrapper[4868]: I1003 13:13:17.402570 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.065254 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7km5p" event={"ID":"52fc4aac-675f-4824-9a11-e41a71de1c88","Type":"ContainerStarted","Data":"6d9c47d21a1dfc03f815bcc1923aa1d91e62d31cfcee13fee16f2b0f6c86109b"} Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.076791 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" event={"ID":"27f11be0-68b7-40ed-8722-96cbcc1119d0","Type":"ContainerStarted","Data":"af51c0ecbcb110c663097779aedb7069b6ef059fcc735b157c862f59848c0419"} Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.076933 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.114470 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-7km5p" podStartSLOduration=3.114438877 podStartE2EDuration="3.114438877s" podCreationTimestamp="2025-10-03 13:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:18.099824836 +0000 UTC m=+1394.309673922" watchObservedRunningTime="2025-10-03 13:13:18.114438877 +0000 UTC m=+1394.324287943" Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 
13:13:18.124468 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" podStartSLOduration=3.124446695 podStartE2EDuration="3.124446695s" podCreationTimestamp="2025-10-03 13:13:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:18.122179615 +0000 UTC m=+1394.332028701" watchObservedRunningTime="2025-10-03 13:13:18.124446695 +0000 UTC m=+1394.334295761" Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.680862 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:18 crc kubenswrapper[4868]: I1003 13:13:18.700835 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.104389 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerStarted","Data":"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18"} Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.105027 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerStarted","Data":"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309"} Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.106699 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d91974be-5f48-4772-951b-e4c4ac1eadc4","Type":"ContainerStarted","Data":"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60"} Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.109508 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-log" containerID="cri-o://14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314" gracePeriod=30 Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.109762 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerStarted","Data":"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989"} Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.109786 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerStarted","Data":"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314"} Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.109834 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-metadata" containerID="cri-o://0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989" gracePeriod=30 Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.134715 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.083461977 podStartE2EDuration="5.134686213s" podCreationTimestamp="2025-10-03 13:13:15 +0000 UTC" firstStartedPulling="2025-10-03 13:13:16.348725237 +0000 UTC m=+1392.558574313" lastFinishedPulling="2025-10-03 13:13:19.399949483 +0000 UTC m=+1395.609798549" observedRunningTime="2025-10-03 13:13:20.12560606 +0000 UTC 
m=+1396.335455136" watchObservedRunningTime="2025-10-03 13:13:20.134686213 +0000 UTC m=+1396.344535279" Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.158633 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.085186192 podStartE2EDuration="6.158603924s" podCreationTimestamp="2025-10-03 13:13:14 +0000 UTC" firstStartedPulling="2025-10-03 13:13:16.322960606 +0000 UTC m=+1392.532809672" lastFinishedPulling="2025-10-03 13:13:19.396378338 +0000 UTC m=+1395.606227404" observedRunningTime="2025-10-03 13:13:20.144423034 +0000 UTC m=+1396.354272100" watchObservedRunningTime="2025-10-03 13:13:20.158603924 +0000 UTC m=+1396.368452990" Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.179939 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.8955755180000002 podStartE2EDuration="6.179910035s" podCreationTimestamp="2025-10-03 13:13:14 +0000 UTC" firstStartedPulling="2025-10-03 13:13:16.112123483 +0000 UTC m=+1392.321972549" lastFinishedPulling="2025-10-03 13:13:19.39645801 +0000 UTC m=+1395.606307066" observedRunningTime="2025-10-03 13:13:20.170628637 +0000 UTC m=+1396.380477713" watchObservedRunningTime="2025-10-03 13:13:20.179910035 +0000 UTC m=+1396.389759101" Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.269359 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.269833 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:13:20 crc kubenswrapper[4868]: I1003 13:13:20.609864 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 13:13:21 crc kubenswrapper[4868]: I1003 13:13:21.124495 4868 generic.go:334] "Generic (PLEG): container finished" podID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerID="14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314" exitCode=143 Oct 03 13:13:21 crc kubenswrapper[4868]: I1003 13:13:21.125391 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerDied","Data":"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314"} Oct 03 13:13:22 crc kubenswrapper[4868]: I1003 13:13:22.378934 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:22 crc kubenswrapper[4868]: I1003 13:13:22.379718 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="5e8d2299-d5c9-413e-bb20-ace3833587fb" containerName="kube-state-metrics" containerID="cri-o://fe69a2935c5d64392a9ab31b1c0d0cc2fa16ff4c993299e61227ddf60b7d3eef" gracePeriod=30 Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.160238 4868 generic.go:334] "Generic (PLEG): container finished" podID="5e8d2299-d5c9-413e-bb20-ace3833587fb" containerID="fe69a2935c5d64392a9ab31b1c0d0cc2fa16ff4c993299e61227ddf60b7d3eef" exitCode=2 Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.161076 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e8d2299-d5c9-413e-bb20-ace3833587fb","Type":"ContainerDied","Data":"fe69a2935c5d64392a9ab31b1c0d0cc2fa16ff4c993299e61227ddf60b7d3eef"} Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.161126 4868 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5e8d2299-d5c9-413e-bb20-ace3833587fb","Type":"ContainerDied","Data":"9626a20f387247ef9c62603e51b04ad6da3232279efa86539b1cef1fdec52553"} Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.161146 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9626a20f387247ef9c62603e51b04ad6da3232279efa86539b1cef1fdec52553" Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.163716 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"06025e3f-67d5-43f8-8a1f-411eb3b835ad","Type":"ContainerStarted","Data":"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0"} Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.163850 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0" gracePeriod=30 Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.188780 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.451493351 podStartE2EDuration="9.188758035s" podCreationTimestamp="2025-10-03 13:13:14 +0000 UTC" firstStartedPulling="2025-10-03 13:13:16.043029081 +0000 UTC m=+1392.252878147" lastFinishedPulling="2025-10-03 13:13:22.780293765 +0000 UTC m=+1398.990142831" observedRunningTime="2025-10-03 13:13:23.184202263 +0000 UTC m=+1399.394051349" watchObservedRunningTime="2025-10-03 13:13:23.188758035 +0000 UTC m=+1399.398607101" Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.204242 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.343100 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hkhhh\" (UniqueName: \"kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh\") pod \"5e8d2299-d5c9-413e-bb20-ace3833587fb\" (UID: \"5e8d2299-d5c9-413e-bb20-ace3833587fb\") " Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.353305 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh" (OuterVolumeSpecName: "kube-api-access-hkhhh") pod "5e8d2299-d5c9-413e-bb20-ace3833587fb" (UID: "5e8d2299-d5c9-413e-bb20-ace3833587fb"). InnerVolumeSpecName "kube-api-access-hkhhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:13:23 crc kubenswrapper[4868]: I1003 13:13:23.447393 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hkhhh\" (UniqueName: \"kubernetes.io/projected/5e8d2299-d5c9-413e-bb20-ace3833587fb-kube-api-access-hkhhh\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.176242 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.214705 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.230643 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.254447 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:24 crc kubenswrapper[4868]: E1003 13:13:24.255125 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e8d2299-d5c9-413e-bb20-ace3833587fb" containerName="kube-state-metrics" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.255192 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e8d2299-d5c9-413e-bb20-ace3833587fb" containerName="kube-state-metrics" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.255522 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e8d2299-d5c9-413e-bb20-ace3833587fb" containerName="kube-state-metrics" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.256543 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.262290 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.262553 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.289501 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.365856 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvr9r\" (UniqueName: \"kubernetes.io/projected/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-api-access-bvr9r\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.366168 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.366340 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.366604 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.468950 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.469096 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.469181 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvr9r\" (UniqueName: \"kubernetes.io/projected/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-api-access-bvr9r\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.469230 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.478199 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.479039 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.488324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.494078 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvr9r\" (UniqueName: \"kubernetes.io/projected/a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8-kube-api-access-bvr9r\") pod \"kube-state-metrics-0\" (UID: \"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8\") " pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.558058 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e8d2299-d5c9-413e-bb20-ace3833587fb" path="/var/lib/kubelet/pods/5e8d2299-d5c9-413e-bb20-ace3833587fb/volumes" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.596414 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.634488 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.634879 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-central-agent" containerID="cri-o://f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c" gracePeriod=30 Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.634997 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="sg-core" containerID="cri-o://6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d" gracePeriod=30 Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.635385 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="proxy-httpd" containerID="cri-o://1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100" gracePeriod=30 Oct 03 13:13:24 crc kubenswrapper[4868]: I1003 13:13:24.635056 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-notification-agent" containerID="cri-o://d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77" gracePeriod=30 Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.186104 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.190753 4868 generic.go:334] "Generic (PLEG): container finished" podID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerID="1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100" exitCode=0 Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.190795 4868 generic.go:334] "Generic (PLEG): container finished" podID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerID="6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d" exitCode=2 Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.190824 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerDied","Data":"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"} Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.190861 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerDied","Data":"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"} Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.207381 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Oct 03 13:13:25 crc kubenswrapper[4868]: W1003 13:13:25.214864 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda2f50a43_ea0f_4242_b3a1_4fbf1f76fca8.slice/crio-9bccb59c8d920413930de4f7c08f1889677f258878df8b6114c57c2b58c2c7b1 WatchSource:0}: Error finding container 9bccb59c8d920413930de4f7c08f1889677f258878df8b6114c57c2b58c2c7b1: Status 404 returned error can't find the container with id 
9bccb59c8d920413930de4f7c08f1889677f258878df8b6114c57c2b58c2c7b1 Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.219179 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.608801 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.647933 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.647979 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.648053 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.667418 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.739863 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"] Oct 03 13:13:25 crc kubenswrapper[4868]: I1003 13:13:25.740540 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="dnsmasq-dns" containerID="cri-o://6b88c9421990d1b6b61feb967f45e639b0ae753850fb5b836b62a15967606c3e" gracePeriod=10 Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.216878 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8","Type":"ContainerStarted","Data":"dcf2681bfc3a27537004214177d9a0582ca29b4312a4ed55dc04c3cca1ac69fc"} Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.216927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8","Type":"ContainerStarted","Data":"9bccb59c8d920413930de4f7c08f1889677f258878df8b6114c57c2b58c2c7b1"} Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.218267 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.230292 4868 generic.go:334] "Generic (PLEG): container finished" podID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerID="f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c" exitCode=0 Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.230386 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerDied","Data":"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"} Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.245422 4868 generic.go:334] "Generic (PLEG): container finished" podID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerID="6b88c9421990d1b6b61feb967f45e639b0ae753850fb5b836b62a15967606c3e" exitCode=0 Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.245793 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" event={"ID":"0e7dcbce-9519-46bf-a3b1-154a95f285f3","Type":"ContainerDied","Data":"6b88c9421990d1b6b61feb967f45e639b0ae753850fb5b836b62a15967606c3e"} Oct 03 13:13:26 crc kubenswrapper[4868]: 
I1003 13:13:26.250019 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=1.880342231 podStartE2EDuration="2.249971971s" podCreationTimestamp="2025-10-03 13:13:24 +0000 UTC" firstStartedPulling="2025-10-03 13:13:25.218820034 +0000 UTC m=+1401.428669100" lastFinishedPulling="2025-10-03 13:13:25.588449774 +0000 UTC m=+1401.798298840" observedRunningTime="2025-10-03 13:13:26.235461631 +0000 UTC m=+1402.445310707" watchObservedRunningTime="2025-10-03 13:13:26.249971971 +0000 UTC m=+1402.459821047"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.305918 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.513388 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582446 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582625 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jgdcw\" (UniqueName: \"kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582692 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582786 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582880 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.582921 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb\") pod \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\" (UID: \"0e7dcbce-9519-46bf-a3b1-154a95f285f3\") "
Oct 03 13:13:26 crc kubenswrapper[4868]: E1003 13:13:26.600339 4868 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6be42dc6_1220_4751_8201_dbcf019309ce.slice/crio-conmon-697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6be42dc6_1220_4751_8201_dbcf019309ce.slice/crio-697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458.scope\": RecentStats: unable to find data in memory cache]"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.618418 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw" (OuterVolumeSpecName: "kube-api-access-jgdcw") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "kube-api-access-jgdcw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.663824 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config" (OuterVolumeSpecName: "config") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.679679 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.687214 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.687270 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jgdcw\" (UniqueName: \"kubernetes.io/projected/0e7dcbce-9519-46bf-a3b1-154a95f285f3-kube-api-access-jgdcw\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.687286 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.696454 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.699647 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.702611 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "0e7dcbce-9519-46bf-a3b1-154a95f285f3" (UID: "0e7dcbce-9519-46bf-a3b1-154a95f285f3"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.730591 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.730670 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.187:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.790571 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-svc\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.790626 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:26 crc kubenswrapper[4868]: I1003 13:13:26.790641 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/0e7dcbce-9519-46bf-a3b1-154a95f285f3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.261158 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c" event={"ID":"0e7dcbce-9519-46bf-a3b1-154a95f285f3","Type":"ContainerDied","Data":"1eb01ee7bd7e93957145656dff64aafcc85311e32d73d0dc6b62f485dffac8d7"}
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.261581 4868 scope.go:117] "RemoveContainer" containerID="6b88c9421990d1b6b61feb967f45e639b0ae753850fb5b836b62a15967606c3e"
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.261183 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-kbv6c"
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.265172 4868 generic.go:334] "Generic (PLEG): container finished" podID="6be42dc6-1220-4751-8201-dbcf019309ce" containerID="697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458" exitCode=0
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.266264 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-89vph" event={"ID":"6be42dc6-1220-4751-8201-dbcf019309ce","Type":"ContainerDied","Data":"697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458"}
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.316165 4868 scope.go:117] "RemoveContainer" containerID="e3e725c7e7082ecfc524026ffedc632fe31dabb13cbea45c89ba3e8c6a5adc3c"
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.355335 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"]
Oct 03 13:13:27 crc kubenswrapper[4868]: I1003 13:13:27.365399 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-kbv6c"]
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.557586 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" path="/var/lib/kubelet/pods/0e7dcbce-9519-46bf-a3b1-154a95f285f3/volumes"
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.713681 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-89vph"
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.832863 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts\") pod \"6be42dc6-1220-4751-8201-dbcf019309ce\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") "
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.833090 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle\") pod \"6be42dc6-1220-4751-8201-dbcf019309ce\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") "
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.833131 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data\") pod \"6be42dc6-1220-4751-8201-dbcf019309ce\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") "
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.833205 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gdcsl\" (UniqueName: \"kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl\") pod \"6be42dc6-1220-4751-8201-dbcf019309ce\" (UID: \"6be42dc6-1220-4751-8201-dbcf019309ce\") "
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.840960 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts" (OuterVolumeSpecName: "scripts") pod "6be42dc6-1220-4751-8201-dbcf019309ce" (UID: "6be42dc6-1220-4751-8201-dbcf019309ce"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.845340 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl" (OuterVolumeSpecName: "kube-api-access-gdcsl") pod "6be42dc6-1220-4751-8201-dbcf019309ce" (UID: "6be42dc6-1220-4751-8201-dbcf019309ce"). InnerVolumeSpecName "kube-api-access-gdcsl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.879730 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data" (OuterVolumeSpecName: "config-data") pod "6be42dc6-1220-4751-8201-dbcf019309ce" (UID: "6be42dc6-1220-4751-8201-dbcf019309ce"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.888888 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6be42dc6-1220-4751-8201-dbcf019309ce" (UID: "6be42dc6-1220-4751-8201-dbcf019309ce"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.936844 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.937252 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.937413 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6be42dc6-1220-4751-8201-dbcf019309ce-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:28 crc kubenswrapper[4868]: I1003 13:13:28.937512 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gdcsl\" (UniqueName: \"kubernetes.io/projected/6be42dc6-1220-4751-8201-dbcf019309ce-kube-api-access-gdcsl\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.314987 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.332487 4868 generic.go:334] "Generic (PLEG): container finished" podID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerID="d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77" exitCode=0
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.332604 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerDied","Data":"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"}
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.332636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7c4c35b3-052d-47dc-b240-27c234aa3ebd","Type":"ContainerDied","Data":"c0ac971c54b563b0a133d8a978378dff056c2a296c7788c9ffd962c791329fa7"}
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.332656 4868 scope.go:117] "RemoveContainer" containerID="1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348448 4868 generic.go:334] "Generic (PLEG): container finished" podID="52fc4aac-675f-4824-9a11-e41a71de1c88" containerID="6d9c47d21a1dfc03f815bcc1923aa1d91e62d31cfcee13fee16f2b0f6c86109b" exitCode=0
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348495 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7km5p" event={"ID":"52fc4aac-675f-4824-9a11-e41a71de1c88","Type":"ContainerDied","Data":"6d9c47d21a1dfc03f815bcc1923aa1d91e62d31cfcee13fee16f2b0f6c86109b"}
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348571 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348675 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348755 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.348840 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84kg7\" (UniqueName: \"kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.349013 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.349142 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.349174 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data\") pod \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\" (UID: \"7c4c35b3-052d-47dc-b240-27c234aa3ebd\") "
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360299 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360513 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-89vph" event={"ID":"6be42dc6-1220-4751-8201-dbcf019309ce","Type":"ContainerDied","Data":"aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351"}
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360547 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aff7fd156526d5a97b47d2606ac32b488d0904dc55ae512944c98bd457633351"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360745 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-89vph"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360738 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.360857 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts" (OuterVolumeSpecName: "scripts") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.379273 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7" (OuterVolumeSpecName: "kube-api-access-84kg7") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "kube-api-access-84kg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.384471 4868 scope.go:117] "RemoveContainer" containerID="6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.404790 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.411964 4868 scope.go:117] "RemoveContainer" containerID="d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.449583 4868 scope.go:117] "RemoveContainer" containerID="f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.453618 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-log-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.453651 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7c4c35b3-052d-47dc-b240-27c234aa3ebd-run-httpd\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.453661 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.453673 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.453684 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84kg7\" (UniqueName: \"kubernetes.io/projected/7c4c35b3-052d-47dc-b240-27c234aa3ebd-kube-api-access-84kg7\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.500980 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.504291 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-log" containerID="cri-o://9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309" gracePeriod=30
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.504996 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-api" containerID="cri-o://b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18" gracePeriod=30
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.526429 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.526708 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerName="nova-scheduler-scheduler" containerID="cri-o://94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" gracePeriod=30
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.546545 4868 scope.go:117] "RemoveContainer" containerID="1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"
Oct 03 13:13:29 crc kubenswrapper[4868]: E1003 13:13:29.548431 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100\": container with ID starting with 1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100 not found: ID does not exist" containerID="1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.548491 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100"} err="failed to get container status \"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100\": rpc error: code = NotFound desc = could not find container \"1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100\": container with ID starting with 1e5d7fa4486405cddfe8a29e608ee636ee8a09352f733746ea5f37ee8d70f100 not found: ID does not exist"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.548529 4868 scope.go:117] "RemoveContainer" containerID="6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"
Oct 03 13:13:29 crc kubenswrapper[4868]: E1003 13:13:29.549902 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d\": container with ID starting with 6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d not found: ID does not exist" containerID="6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.549968 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d"} err="failed to get container status \"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d\": rpc error: code = NotFound desc = could not find container \"6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d\": container with ID starting with 6ae12e35312b122e75527a6bbd1bc031b376608fa928122bff7bbfea70dc180d not found: ID does not exist"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.550015 4868 scope.go:117] "RemoveContainer" containerID="d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"
Oct 03 13:13:29 crc kubenswrapper[4868]: E1003 13:13:29.550443 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77\": container with ID starting with d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77 not found: ID does not exist" containerID="d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.550493 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77"} err="failed to get container status \"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77\": rpc error: code = NotFound desc = could not find container \"d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77\": container with ID starting with d5cc832f4fea99bc2cb39e37a9015cf280087bdaf00f57b3b34205430fc22f77 not found: ID does not exist"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.550518 4868 scope.go:117] "RemoveContainer" containerID="f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"
Oct 03 13:13:29 crc kubenswrapper[4868]: E1003 13:13:29.550932 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c\": container with ID starting with f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c not found: ID does not exist" containerID="f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.551010 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c"} err="failed to get container status \"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c\": rpc error: code = NotFound desc = could not find container \"f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c\": container with ID starting with f97d64bcc759a45960972d8a5e99774644049c53b98fd08be43a007be247a11c not found: ID does not exist"
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.566205 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.577224 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data" (OuterVolumeSpecName: "config-data") pod "7c4c35b3-052d-47dc-b240-27c234aa3ebd" (UID: "7c4c35b3-052d-47dc-b240-27c234aa3ebd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.658424 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:29 crc kubenswrapper[4868]: I1003 13:13:29.658484 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7c4c35b3-052d-47dc-b240-27c234aa3ebd-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.372694 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.375426 4868 generic.go:334] "Generic (PLEG): container finished" podID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerID="9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309" exitCode=143
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.375467 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerDied","Data":"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309"}
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.414622 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.423238 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.439499 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440201 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-central-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440232 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-central-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440256 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="dnsmasq-dns"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440266 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="dnsmasq-dns"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440287 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="sg-core"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440295 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="sg-core"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440319 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6be42dc6-1220-4751-8201-dbcf019309ce" containerName="nova-manage"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440329 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6be42dc6-1220-4751-8201-dbcf019309ce" containerName="nova-manage"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440343 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-notification-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440351 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-notification-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440370 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="init"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440380 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="init"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.440398 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="proxy-httpd"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440407 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="proxy-httpd"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440644 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="6be42dc6-1220-4751-8201-dbcf019309ce" containerName="nova-manage"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440658 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-central-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440682 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="ceilometer-notification-agent"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440699 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e7dcbce-9519-46bf-a3b1-154a95f285f3" containerName="dnsmasq-dns"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440711 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="proxy-httpd"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.440726 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" containerName="sg-core"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.444350 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.447755 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.453830 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.454201 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.474316 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476494 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476602 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476669 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476710 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476738 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7wmn\" (UniqueName: \"kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476765 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476811 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.476841 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.565493 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c4c35b3-052d-47dc-b240-27c234aa3ebd" path="/var/lib/kubelet/pods/7c4c35b3-052d-47dc-b240-27c234aa3ebd/volumes"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579582 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579667 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579706 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7wmn\" (UniqueName: \"kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579744 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579805 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579846 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.579926 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.580092 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.581614 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.581678 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.588801 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.589146 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.589478 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.591329 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.600241 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.611407 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7wmn\" (UniqueName: \"kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn\") pod \"ceilometer-0\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.619421 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.623036 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.625163 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Oct 03 13:13:30 crc kubenswrapper[4868]: E1003 13:13:30.625214 4868 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerName="nova-scheduler-scheduler"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.821516 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Oct 03 13:13:30 crc kubenswrapper[4868]: I1003 13:13:30.966772 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7km5p"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.005999 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle\") pod \"52fc4aac-675f-4824-9a11-e41a71de1c88\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") "
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.043901 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "52fc4aac-675f-4824-9a11-e41a71de1c88" (UID: "52fc4aac-675f-4824-9a11-e41a71de1c88"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.108750 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44jg4\" (UniqueName: \"kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4\") pod \"52fc4aac-675f-4824-9a11-e41a71de1c88\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") "
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.109129 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts\") pod \"52fc4aac-675f-4824-9a11-e41a71de1c88\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") "
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.109222 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data\") pod \"52fc4aac-675f-4824-9a11-e41a71de1c88\" (UID: \"52fc4aac-675f-4824-9a11-e41a71de1c88\") "
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.109844 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.114637 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4" (OuterVolumeSpecName: "kube-api-access-44jg4") pod "52fc4aac-675f-4824-9a11-e41a71de1c88" (UID: "52fc4aac-675f-4824-9a11-e41a71de1c88"). InnerVolumeSpecName "kube-api-access-44jg4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.115263 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts" (OuterVolumeSpecName: "scripts") pod "52fc4aac-675f-4824-9a11-e41a71de1c88" (UID: "52fc4aac-675f-4824-9a11-e41a71de1c88"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.141544 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data" (OuterVolumeSpecName: "config-data") pod "52fc4aac-675f-4824-9a11-e41a71de1c88" (UID: "52fc4aac-675f-4824-9a11-e41a71de1c88"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.211888 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-scripts\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.211925 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52fc4aac-675f-4824-9a11-e41a71de1c88-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.211936 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44jg4\" (UniqueName: \"kubernetes.io/projected/52fc4aac-675f-4824-9a11-e41a71de1c88-kube-api-access-44jg4\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.317517 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.392431 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerStarted","Data":"608780fdd43b7946e6e47899f5522f56b7c747bf39ad99bb2181b6d193327e1a"}
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.397216 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-7km5p" event={"ID":"52fc4aac-675f-4824-9a11-e41a71de1c88","Type":"ContainerDied","Data":"2e0f4e088b48fadeb1facaee6ec4a575940f83e8a9a1cc9f8851abb3ac05edca"}
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.397379 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2e0f4e088b48fadeb1facaee6ec4a575940f83e8a9a1cc9f8851abb3ac05edca"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.397486 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-7km5p"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.479534 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 03 13:13:31 crc kubenswrapper[4868]: E1003 13:13:31.480174 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52fc4aac-675f-4824-9a11-e41a71de1c88" containerName="nova-cell1-conductor-db-sync"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.480204 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="52fc4aac-675f-4824-9a11-e41a71de1c88" containerName="nova-cell1-conductor-db-sync"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.480488 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="52fc4aac-675f-4824-9a11-e41a71de1c88" containerName="nova-cell1-conductor-db-sync"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.482967 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.487525 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.494744 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.626079 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.626142 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.626361 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlzcd\" (UniqueName: \"kubernetes.io/projected/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-kube-api-access-nlzcd\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.727651 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlzcd\" (UniqueName: \"kubernetes.io/projected/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-kube-api-access-nlzcd\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.727811 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.727832 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.734118 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.734471 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.747146 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlzcd\" (UniqueName: \"kubernetes.io/projected/90aa0fe0-12ad-4477-b6f3-7ce50f480fd9-kube-api-access-nlzcd\") pod \"nova-cell1-conductor-0\" (UID: \"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9\") " pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:31 crc kubenswrapper[4868]: I1003 13:13:31.809524 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.146656 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.147241 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.147313 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.148098 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.148158 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6" gracePeriod=600
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.325482 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.422860 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerStarted","Data":"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d"}
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.425963 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9","Type":"ContainerStarted","Data":"ceceb4877dc61f708ab3b8ba12fbdf77296949d7423e75a28f8694cf1accc01c"}
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.445562 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6" exitCode=0
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.445636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6"}
Oct 03 13:13:32 crc kubenswrapper[4868]: I1003 13:13:32.445691 4868 scope.go:117] "RemoveContainer" containerID="7d461164a3dcc0127d9bde036985d70792014962ce61b7453395cd3dd71f6a6c"
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.030672 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.067741 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle\") pod \"d91974be-5f48-4772-951b-e4c4ac1eadc4\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.068318 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tc7zk\" (UniqueName: \"kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk\") pod \"d91974be-5f48-4772-951b-e4c4ac1eadc4\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.068392 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data\") pod \"d91974be-5f48-4772-951b-e4c4ac1eadc4\" (UID: \"d91974be-5f48-4772-951b-e4c4ac1eadc4\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.086659 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk" (OuterVolumeSpecName: "kube-api-access-tc7zk") pod "d91974be-5f48-4772-951b-e4c4ac1eadc4" (UID: "d91974be-5f48-4772-951b-e4c4ac1eadc4"). InnerVolumeSpecName "kube-api-access-tc7zk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.138368 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data" (OuterVolumeSpecName: "config-data") pod "d91974be-5f48-4772-951b-e4c4ac1eadc4" (UID: "d91974be-5f48-4772-951b-e4c4ac1eadc4"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.146454 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d91974be-5f48-4772-951b-e4c4ac1eadc4" (UID: "d91974be-5f48-4772-951b-e4c4ac1eadc4"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.171969 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.172006 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tc7zk\" (UniqueName: \"kubernetes.io/projected/d91974be-5f48-4772-951b-e4c4ac1eadc4-kube-api-access-tc7zk\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.172020 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d91974be-5f48-4772-951b-e4c4ac1eadc4-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.201328 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.273709 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs\") pod \"47d81ef6-7fb4-4cd8-b217-2c034980317c\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.273797 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-frdjm\" (UniqueName: \"kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm\") pod \"47d81ef6-7fb4-4cd8-b217-2c034980317c\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.273857 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle\") pod \"47d81ef6-7fb4-4cd8-b217-2c034980317c\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.273919 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data\") pod \"47d81ef6-7fb4-4cd8-b217-2c034980317c\" (UID: \"47d81ef6-7fb4-4cd8-b217-2c034980317c\") "
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.275625 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs" (OuterVolumeSpecName: "logs") pod "47d81ef6-7fb4-4cd8-b217-2c034980317c" (UID: "47d81ef6-7fb4-4cd8-b217-2c034980317c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.288419 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm" (OuterVolumeSpecName: "kube-api-access-frdjm") pod "47d81ef6-7fb4-4cd8-b217-2c034980317c" (UID: "47d81ef6-7fb4-4cd8-b217-2c034980317c"). InnerVolumeSpecName "kube-api-access-frdjm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.319747 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47d81ef6-7fb4-4cd8-b217-2c034980317c" (UID: "47d81ef6-7fb4-4cd8-b217-2c034980317c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.326133 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data" (OuterVolumeSpecName: "config-data") pod "47d81ef6-7fb4-4cd8-b217-2c034980317c" (UID: "47d81ef6-7fb4-4cd8-b217-2c034980317c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.377730 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47d81ef6-7fb4-4cd8-b217-2c034980317c-logs\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.378210 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-frdjm\" (UniqueName: \"kubernetes.io/projected/47d81ef6-7fb4-4cd8-b217-2c034980317c-kube-api-access-frdjm\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.378231 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.378244 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47d81ef6-7fb4-4cd8-b217-2c034980317c-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.461513 4868 generic.go:334] "Generic (PLEG): container finished" podID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerID="b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18" exitCode=0
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.461626 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerDied","Data":"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18"}
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.461713 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"47d81ef6-7fb4-4cd8-b217-2c034980317c","Type":"ContainerDied","Data":"0f420d7e7b4f91763b38fa800aacece7f008001b43e1b27fc2fb9b52af39b200"}
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.461746 4868 scope.go:117] "RemoveContainer" containerID="b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18"
Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.461992 4868 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.468465 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"90aa0fe0-12ad-4477-b6f3-7ce50f480fd9","Type":"ContainerStarted","Data":"860d76ac4fa428bbf7e85b07503a39b888072ed1587c1738a5ab4a8209db6cd3"} Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.468584 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.474444 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31"} Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.477844 4868 generic.go:334] "Generic (PLEG): container finished" podID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" exitCode=0 Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.477905 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d91974be-5f48-4772-951b-e4c4ac1eadc4","Type":"ContainerDied","Data":"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60"} Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.477938 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"d91974be-5f48-4772-951b-e4c4ac1eadc4","Type":"ContainerDied","Data":"52510002b30a5d24cf062b695ee35812a2bc417295307cd4f59e27999ec5350a"} Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.478007 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.504491 4868 scope.go:117] "RemoveContainer" containerID="9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.526539 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.526512213 podStartE2EDuration="2.526512213s" podCreationTimestamp="2025-10-03 13:13:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:33.489279204 +0000 UTC m=+1409.699128290" watchObservedRunningTime="2025-10-03 13:13:33.526512213 +0000 UTC m=+1409.736361279" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.528872 4868 scope.go:117] "RemoveContainer" containerID="b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18" Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.529681 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18\": container with ID starting with b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18 not found: ID does not exist" containerID="b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.529786 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18"} err="failed to get container status \"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18\": rpc error: code = NotFound desc = could not find container \"b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18\": container with ID starting with b165f845d5274af50c0b03a72ed98c83f6e67c433a7ce4fc07bf361b5662fb18 not found: ID does not exist" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.529864 4868 scope.go:117] "RemoveContainer" containerID="9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309" Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.530483 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309\": container with ID starting with 9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309 not found: ID does not exist" containerID="9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.530525 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309"} err="failed to get container status \"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309\": rpc error: code = NotFound desc = could not find container \"9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309\": container with ID starting with 9e15edf6355e65f5ff4e862fdca08714108e82f5915941dbec6f77872bc05309 not found: ID does not exist" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.530554 4868 scope.go:117] "RemoveContainer" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.543680 4868 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.553483 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.563987 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.573932 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.579317 4868 scope.go:117] "RemoveContainer" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.583217 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60\": container with ID starting with 94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60 not found: ID does not exist" containerID="94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.583272 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60"} err="failed to get container status \"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60\": rpc error: code = NotFound desc = could not find container \"94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60\": container with ID starting with 94efb746777cdd7df9ee3a2e08d0ea2b10feff5bb2086c081609453fde35cc60 not found: ID does not exist" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.583327 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.583885 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-api" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.583911 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-api" Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.583945 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerName="nova-scheduler-scheduler" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.583955 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerName="nova-scheduler-scheduler" Oct 03 13:13:33 crc kubenswrapper[4868]: E1003 13:13:33.583973 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-log" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.583980 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-log" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.584252 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-api" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.584279 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" containerName="nova-scheduler-scheduler" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.584304 
4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" containerName="nova-api-log" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.586160 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.590455 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.599138 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.602010 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.605990 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.613466 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.623736 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.688957 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.689097 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8l9r9\" (UniqueName: \"kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.689340 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.689396 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.689740 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6s5w9\" (UniqueName: \"kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.689956 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc 
kubenswrapper[4868]: I1003 13:13:33.690040 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.791748 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6s5w9\" (UniqueName: \"kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.791852 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.791887 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.791931 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.791973 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8l9r9\" (UniqueName: \"kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.792020 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.792038 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.792932 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.806716 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc 
kubenswrapper[4868]: I1003 13:13:33.806785 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.807103 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.807367 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.809822 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6s5w9\" (UniqueName: \"kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9\") pod \"nova-api-0\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.815173 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8l9r9\" (UniqueName: \"kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9\") pod \"nova-scheduler-0\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " pod="openstack/nova-scheduler-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.916499 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:13:33 crc kubenswrapper[4868]: I1003 13:13:33.940942 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.455281 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.497005 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerStarted","Data":"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe"} Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.498887 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerStarted","Data":"ac87ed73051b69ce9437f500f215de526e2de116cec73befed6bbcffb176988b"} Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.572689 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47d81ef6-7fb4-4cd8-b217-2c034980317c" path="/var/lib/kubelet/pods/47d81ef6-7fb4-4cd8-b217-2c034980317c/volumes" Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.575142 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d91974be-5f48-4772-951b-e4c4ac1eadc4" path="/var/lib/kubelet/pods/d91974be-5f48-4772-951b-e4c4ac1eadc4/volumes" Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.633575 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Oct 03 13:13:34 crc kubenswrapper[4868]: I1003 13:13:34.790013 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.519214 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b44f2cd-df8b-45f0-ada0-3e8fb6093726","Type":"ContainerStarted","Data":"bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03"} Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.521377 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b44f2cd-df8b-45f0-ada0-3e8fb6093726","Type":"ContainerStarted","Data":"8f6785f7830606da0865d565a9c5d61a2e251c15d745845c44c6c90926f5dadf"} Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.524727 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerStarted","Data":"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89"} Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.528830 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerStarted","Data":"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a"} Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.528990 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerStarted","Data":"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c"} Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.548560 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.548538746 podStartE2EDuration="2.548538746s" podCreationTimestamp="2025-10-03 13:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 
13:13:35.546867951 +0000 UTC m=+1411.756717027" watchObservedRunningTime="2025-10-03 13:13:35.548538746 +0000 UTC m=+1411.758387812" Oct 03 13:13:35 crc kubenswrapper[4868]: I1003 13:13:35.577648 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.577622006 podStartE2EDuration="2.577622006s" podCreationTimestamp="2025-10-03 13:13:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:35.568021998 +0000 UTC m=+1411.777871064" watchObservedRunningTime="2025-10-03 13:13:35.577622006 +0000 UTC m=+1411.787471072" Oct 03 13:13:37 crc kubenswrapper[4868]: I1003 13:13:37.552491 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerStarted","Data":"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8"} Oct 03 13:13:37 crc kubenswrapper[4868]: I1003 13:13:37.590778 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.065541871 podStartE2EDuration="7.590747379s" podCreationTimestamp="2025-10-03 13:13:30 +0000 UTC" firstStartedPulling="2025-10-03 13:13:31.330587467 +0000 UTC m=+1407.540436533" lastFinishedPulling="2025-10-03 13:13:36.855792975 +0000 UTC m=+1413.065642041" observedRunningTime="2025-10-03 13:13:37.578378328 +0000 UTC m=+1413.788227394" watchObservedRunningTime="2025-10-03 13:13:37.590747379 +0000 UTC m=+1413.800596455" Oct 03 13:13:38 crc kubenswrapper[4868]: I1003 13:13:38.564903 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:13:38 crc kubenswrapper[4868]: I1003 13:13:38.941922 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Oct 03 13:13:41 crc kubenswrapper[4868]: I1003 13:13:41.838459 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Oct 03 13:13:43 crc kubenswrapper[4868]: I1003 13:13:43.917717 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:13:43 crc kubenswrapper[4868]: I1003 13:13:43.919553 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:13:43 crc kubenswrapper[4868]: I1003 13:13:43.941715 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Oct 03 13:13:43 crc kubenswrapper[4868]: I1003 13:13:43.969504 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Oct 03 13:13:44 crc kubenswrapper[4868]: I1003 13:13:44.648346 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Oct 03 13:13:45 crc kubenswrapper[4868]: I1003 13:13:45.000418 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:13:45 crc kubenswrapper[4868]: I1003 13:13:45.000872 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-api" probeResult="failure" output="Get 
\"http://10.217.0.193:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.550775 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.680856 4868 generic.go:334] "Generic (PLEG): container finished" podID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerID="0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989" exitCode=137 Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.681245 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerDied","Data":"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989"} Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.681280 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"0d6573ee-1cc0-4116-881f-00c784f4a9b0","Type":"ContainerDied","Data":"9ebb7b99f0c3199c01ec80a99b25c65bda1bde4945e571e7c02aff92b3d9b330"} Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.681312 4868 scope.go:117] "RemoveContainer" containerID="0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.681513 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.705391 4868 scope.go:117] "RemoveContainer" containerID="14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.715562 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data\") pod \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.716599 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbwmm\" (UniqueName: \"kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm\") pod \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.716692 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle\") pod \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.716799 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs\") pod \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\" (UID: \"0d6573ee-1cc0-4116-881f-00c784f4a9b0\") " Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.717457 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs" (OuterVolumeSpecName: "logs") pod "0d6573ee-1cc0-4116-881f-00c784f4a9b0" (UID: "0d6573ee-1cc0-4116-881f-00c784f4a9b0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.721738 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm" (OuterVolumeSpecName: "kube-api-access-tbwmm") pod "0d6573ee-1cc0-4116-881f-00c784f4a9b0" (UID: "0d6573ee-1cc0-4116-881f-00c784f4a9b0"). InnerVolumeSpecName "kube-api-access-tbwmm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.725483 4868 scope.go:117] "RemoveContainer" containerID="0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989" Oct 03 13:13:50 crc kubenswrapper[4868]: E1003 13:13:50.727234 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989\": container with ID starting with 0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989 not found: ID does not exist" containerID="0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.727293 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989"} err="failed to get container status \"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989\": rpc error: code = NotFound desc = could not find container \"0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989\": container with ID starting with 0c95a47bcc5709b96cec5ab6e98dda7bcebb63afc33f4612233f06e362662989 not found: ID does not exist" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.727327 4868 scope.go:117] "RemoveContainer" containerID="14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314" Oct 03 13:13:50 crc kubenswrapper[4868]: E1003 13:13:50.728636 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314\": container with ID starting with 14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314 not found: ID does not exist" containerID="14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.728690 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314"} err="failed to get container status \"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314\": rpc error: code = NotFound desc = could not find container \"14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314\": container with ID starting with 14166a2c230c9d76540c6ee8f86c4b6e34a8dab78df5ad1cd2673fa276648314 not found: ID does not exist" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.749289 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data" (OuterVolumeSpecName: "config-data") pod "0d6573ee-1cc0-4116-881f-00c784f4a9b0" (UID: "0d6573ee-1cc0-4116-881f-00c784f4a9b0"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.750322 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d6573ee-1cc0-4116-881f-00c784f4a9b0" (UID: "0d6573ee-1cc0-4116-881f-00c784f4a9b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.819139 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d6573ee-1cc0-4116-881f-00c784f4a9b0-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.819205 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.819217 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbwmm\" (UniqueName: \"kubernetes.io/projected/0d6573ee-1cc0-4116-881f-00c784f4a9b0-kube-api-access-tbwmm\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:50 crc kubenswrapper[4868]: I1003 13:13:50.819235 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d6573ee-1cc0-4116-881f-00c784f4a9b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.023551 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.035773 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.055257 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:51 crc kubenswrapper[4868]: E1003 13:13:51.055937 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-metadata" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.055958 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-metadata" Oct 03 13:13:51 crc kubenswrapper[4868]: E1003 13:13:51.055989 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-log" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.055996 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-log" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.056246 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-metadata" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.056271 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" containerName="nova-metadata-log" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.057619 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.060923 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.061200 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.081190 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.126758 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.126850 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.127289 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.127357 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dsnq\" (UniqueName: \"kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.127470 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.228920 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.228969 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dsnq\" (UniqueName: \"kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.229010 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: 
\"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.229295 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.229323 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.229816 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.233705 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.235456 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.235691 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.247626 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dsnq\" (UniqueName: \"kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq\") pod \"nova-metadata-0\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.380205 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:13:51 crc kubenswrapper[4868]: I1003 13:13:51.844025 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:13:51 crc kubenswrapper[4868]: W1003 13:13:51.850383 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfeb416aa_1925_4717_b68b_1e0edcf34b85.slice/crio-4ae5f63a298ce4adf38c45a6613c355ea39f768872bd9eee1b229d1514a7639e WatchSource:0}: Error finding container 4ae5f63a298ce4adf38c45a6613c355ea39f768872bd9eee1b229d1514a7639e: Status 404 returned error can't find the container with id 4ae5f63a298ce4adf38c45a6613c355ea39f768872bd9eee1b229d1514a7639e Oct 03 13:13:52 crc kubenswrapper[4868]: I1003 13:13:52.558526 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d6573ee-1cc0-4116-881f-00c784f4a9b0" path="/var/lib/kubelet/pods/0d6573ee-1cc0-4116-881f-00c784f4a9b0/volumes" Oct 03 13:13:52 crc kubenswrapper[4868]: I1003 13:13:52.704143 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerStarted","Data":"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1"} Oct 03 13:13:52 crc kubenswrapper[4868]: I1003 13:13:52.704497 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerStarted","Data":"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1"} Oct 03 13:13:52 crc kubenswrapper[4868]: I1003 13:13:52.704510 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerStarted","Data":"4ae5f63a298ce4adf38c45a6613c355ea39f768872bd9eee1b229d1514a7639e"} Oct 03 13:13:52 crc kubenswrapper[4868]: I1003 13:13:52.733171 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=1.733146793 podStartE2EDuration="1.733146793s" podCreationTimestamp="2025-10-03 13:13:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:52.727534013 +0000 UTC m=+1428.937383099" watchObservedRunningTime="2025-10-03 13:13:52.733146793 +0000 UTC m=+1428.942995859" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.633950 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.717659 4868 generic.go:334] "Generic (PLEG): container finished" podID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" containerID="5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0" exitCode=137 Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.717735 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.717788 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"06025e3f-67d5-43f8-8a1f-411eb3b835ad","Type":"ContainerDied","Data":"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0"} Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.717856 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"06025e3f-67d5-43f8-8a1f-411eb3b835ad","Type":"ContainerDied","Data":"3ab1d3299a35f742e7fbf64acd055fe00cc8696880d84d5bde04827ea307594d"} Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.717878 4868 scope.go:117] "RemoveContainer" containerID="5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.740921 4868 scope.go:117] "RemoveContainer" containerID="5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0" Oct 03 13:13:53 crc kubenswrapper[4868]: E1003 13:13:53.741401 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0\": container with ID starting with 5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0 not found: ID does not exist" containerID="5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.741439 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0"} err="failed to get container status \"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0\": rpc error: code = NotFound desc = could not find container \"5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0\": container with ID starting with 5dc3bc7e1e7a1ef39965040ef872aecd11d05584d9a857a59d3d31810cd6e5c0 not found: ID does not exist" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.785606 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data\") pod \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.785694 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4vmjk\" (UniqueName: \"kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk\") pod \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.785842 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle\") pod \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\" (UID: \"06025e3f-67d5-43f8-8a1f-411eb3b835ad\") " Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.792961 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk" (OuterVolumeSpecName: "kube-api-access-4vmjk") pod "06025e3f-67d5-43f8-8a1f-411eb3b835ad" (UID: "06025e3f-67d5-43f8-8a1f-411eb3b835ad"). 
InnerVolumeSpecName "kube-api-access-4vmjk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.823883 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06025e3f-67d5-43f8-8a1f-411eb3b835ad" (UID: "06025e3f-67d5-43f8-8a1f-411eb3b835ad"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.831783 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data" (OuterVolumeSpecName: "config-data") pod "06025e3f-67d5-43f8-8a1f-411eb3b835ad" (UID: "06025e3f-67d5-43f8-8a1f-411eb3b835ad"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.887933 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.888111 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4vmjk\" (UniqueName: \"kubernetes.io/projected/06025e3f-67d5-43f8-8a1f-411eb3b835ad-kube-api-access-4vmjk\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.888196 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06025e3f-67d5-43f8-8a1f-411eb3b835ad-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.920696 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.920761 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.921222 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.921253 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.923422 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:13:53 crc kubenswrapper[4868]: I1003 13:13:53.926558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.098369 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.135540 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.160009 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"] Oct 03 13:13:54 crc kubenswrapper[4868]: E1003 13:13:54.160622 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.160639 4868 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.160908 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" containerName="nova-cell1-novncproxy-novncproxy" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.162158 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.168328 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.173797 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.179388 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.179776 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.180217 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.180922 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"] Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.189321 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321420 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321702 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-flc7n\" (UniqueName: \"kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321750 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9d55d\" (UniqueName: \"kubernetes.io/projected/636e5929-0de9-4493-89e1-7844be486324-kube-api-access-9d55d\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321787 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321825 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.321970 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.322031 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.322137 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.322171 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.322297 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.322331 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.424620 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425597 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425685 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425723 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425764 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425794 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-flc7n\" (UniqueName: \"kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425827 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9d55d\" (UniqueName: \"kubernetes.io/projected/636e5929-0de9-4493-89e1-7844be486324-kube-api-access-9d55d\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425869 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425906 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425964 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.425997 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.426843 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: 
\"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.426891 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.426919 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.427212 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.427613 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.431132 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.431698 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.432479 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.432501 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/636e5929-0de9-4493-89e1-7844be486324-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.460819 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9d55d\" (UniqueName: \"kubernetes.io/projected/636e5929-0de9-4493-89e1-7844be486324-kube-api-access-9d55d\") pod \"nova-cell1-novncproxy-0\" (UID: \"636e5929-0de9-4493-89e1-7844be486324\") " 
pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.461123 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-flc7n\" (UniqueName: \"kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n\") pod \"dnsmasq-dns-89c5cd4d5-6gqrq\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") " pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.508392 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.520272 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.586886 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06025e3f-67d5-43f8-8a1f-411eb3b835ad" path="/var/lib/kubelet/pods/06025e3f-67d5-43f8-8a1f-411eb3b835ad/volumes" Oct 03 13:13:54 crc kubenswrapper[4868]: I1003 13:13:54.992770 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"] Oct 03 13:13:55 crc kubenswrapper[4868]: W1003 13:13:55.009947 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76f656ae_c9ea_41e9_8b01_15485b063729.slice/crio-4d3cb025c33d82c8a3e192faba7da76af70caf451bfaaf3562bb20b0f4192ba2 WatchSource:0}: Error finding container 4d3cb025c33d82c8a3e192faba7da76af70caf451bfaaf3562bb20b0f4192ba2: Status 404 returned error can't find the container with id 4d3cb025c33d82c8a3e192faba7da76af70caf451bfaaf3562bb20b0f4192ba2 Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.063907 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.767675 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"636e5929-0de9-4493-89e1-7844be486324","Type":"ContainerStarted","Data":"7c4c65d6f3ccb2b522f1235e9c9b47e6cf2044e52ae7aee46cc2aa5a89569996"} Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.768048 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"636e5929-0de9-4493-89e1-7844be486324","Type":"ContainerStarted","Data":"e63ac1ae8cb2028665d59a2f06e9da82dc1abbb95d2e43be3bf27e9524e956bc"} Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.770202 4868 generic.go:334] "Generic (PLEG): container finished" podID="76f656ae-c9ea-41e9-8b01-15485b063729" containerID="17421209ee7d31e74be9f41663a25fc26200a742fc6cc15d469d77ba8ecb101b" exitCode=0 Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.771123 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" event={"ID":"76f656ae-c9ea-41e9-8b01-15485b063729","Type":"ContainerDied","Data":"17421209ee7d31e74be9f41663a25fc26200a742fc6cc15d469d77ba8ecb101b"} Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.771162 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" event={"ID":"76f656ae-c9ea-41e9-8b01-15485b063729","Type":"ContainerStarted","Data":"4d3cb025c33d82c8a3e192faba7da76af70caf451bfaaf3562bb20b0f4192ba2"} Oct 03 13:13:55 crc kubenswrapper[4868]: I1003 13:13:55.789948 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.78992313 podStartE2EDuration="1.78992313s" podCreationTimestamp="2025-10-03 13:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:55.786122387 +0000 UTC m=+1431.995971473" watchObservedRunningTime="2025-10-03 13:13:55.78992313 +0000 UTC m=+1431.999772186" Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.340183 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.340832 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-central-agent" containerID="cri-o://2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.340980 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" containerID="cri-o://9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.341014 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="sg-core" containerID="cri-o://0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.341058 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-notification-agent" containerID="cri-o://f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.362818 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.191:3000/\": EOF" Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.381130 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.381185 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.764744 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.829400 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" event={"ID":"76f656ae-c9ea-41e9-8b01-15485b063729","Type":"ContainerStarted","Data":"d5e1fc736af22d706c7a227c1b7918b0a778e908fd408ef6eba93f64dfcfcdbf"} Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.829491 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.844934 4868 generic.go:334] "Generic (PLEG): container finished" podID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerID="9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8" exitCode=0 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.844974 4868 
generic.go:334] "Generic (PLEG): container finished" podID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerID="0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89" exitCode=2 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.844985 4868 generic.go:334] "Generic (PLEG): container finished" podID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerID="2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d" exitCode=0 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.845018 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerDied","Data":"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8"} Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.845092 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerDied","Data":"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89"} Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.845104 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerDied","Data":"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d"} Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.845374 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-log" containerID="cri-o://8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.845433 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-api" containerID="cri-o://054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a" gracePeriod=30 Oct 03 13:13:56 crc kubenswrapper[4868]: I1003 13:13:56.858505 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" podStartSLOduration=2.8584826789999997 podStartE2EDuration="2.858482679s" podCreationTimestamp="2025-10-03 13:13:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:13:56.854383179 +0000 UTC m=+1433.064232265" watchObservedRunningTime="2025-10-03 13:13:56.858482679 +0000 UTC m=+1433.068331745" Oct 03 13:13:57 crc kubenswrapper[4868]: I1003 13:13:57.860998 4868 generic.go:334] "Generic (PLEG): container finished" podID="f2da605b-813a-4738-bedf-b858cb223fd8" containerID="8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c" exitCode=143 Oct 03 13:13:57 crc kubenswrapper[4868]: I1003 13:13:57.861240 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerDied","Data":"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c"} Oct 03 13:13:59 crc kubenswrapper[4868]: I1003 13:13:59.520635 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.449509 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.578565 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data\") pod \"f2da605b-813a-4738-bedf-b858cb223fd8\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.578727 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6s5w9\" (UniqueName: \"kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9\") pod \"f2da605b-813a-4738-bedf-b858cb223fd8\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.578784 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle\") pod \"f2da605b-813a-4738-bedf-b858cb223fd8\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.578858 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs\") pod \"f2da605b-813a-4738-bedf-b858cb223fd8\" (UID: \"f2da605b-813a-4738-bedf-b858cb223fd8\") " Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.579739 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs" (OuterVolumeSpecName: "logs") pod "f2da605b-813a-4738-bedf-b858cb223fd8" (UID: "f2da605b-813a-4738-bedf-b858cb223fd8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.580883 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f2da605b-813a-4738-bedf-b858cb223fd8-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.620038 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9" (OuterVolumeSpecName: "kube-api-access-6s5w9") pod "f2da605b-813a-4738-bedf-b858cb223fd8" (UID: "f2da605b-813a-4738-bedf-b858cb223fd8"). InnerVolumeSpecName "kube-api-access-6s5w9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.635547 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2da605b-813a-4738-bedf-b858cb223fd8" (UID: "f2da605b-813a-4738-bedf-b858cb223fd8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.658746 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data" (OuterVolumeSpecName: "config-data") pod "f2da605b-813a-4738-bedf-b858cb223fd8" (UID: "f2da605b-813a-4738-bedf-b858cb223fd8"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.683204 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.683242 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2da605b-813a-4738-bedf-b858cb223fd8-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.683252 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6s5w9\" (UniqueName: \"kubernetes.io/projected/f2da605b-813a-4738-bedf-b858cb223fd8-kube-api-access-6s5w9\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.823446 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.191:3000/\": dial tcp 10.217.0.191:3000: connect: connection refused" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.892656 4868 generic.go:334] "Generic (PLEG): container finished" podID="f2da605b-813a-4738-bedf-b858cb223fd8" containerID="054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a" exitCode=0 Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.892710 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerDied","Data":"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a"} Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.892742 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f2da605b-813a-4738-bedf-b858cb223fd8","Type":"ContainerDied","Data":"ac87ed73051b69ce9437f500f215de526e2de116cec73befed6bbcffb176988b"} Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.892763 4868 scope.go:117] "RemoveContainer" containerID="054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.893356 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.922879 4868 scope.go:117] "RemoveContainer" containerID="8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.925763 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.934409 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.953561 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:00 crc kubenswrapper[4868]: E1003 13:14:00.954395 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-api" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.954497 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-api" Oct 03 13:14:00 crc kubenswrapper[4868]: E1003 13:14:00.954645 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-log" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.954707 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-log" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.954968 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-api" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.955044 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" containerName="nova-api-log" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.957228 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.959923 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.960258 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.960491 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.962713 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.967736 4868 scope.go:117] "RemoveContainer" containerID="054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a" Oct 03 13:14:00 crc kubenswrapper[4868]: E1003 13:14:00.968334 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a\": container with ID starting with 054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a not found: ID does not exist" containerID="054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.968475 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a"} err="failed to get container status \"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a\": rpc error: code = NotFound desc = could not find container \"054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a\": container with ID starting with 054110ce47f41fcbb544c665e071da127669d306c466b36c625dc3888cb8e82a not found: ID does not exist" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.968598 4868 scope.go:117] "RemoveContainer" containerID="8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c" Oct 03 13:14:00 crc kubenswrapper[4868]: E1003 13:14:00.969005 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c\": container with ID starting with 8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c not found: ID does not exist" containerID="8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c" Oct 03 13:14:00 crc kubenswrapper[4868]: I1003 13:14:00.969037 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c"} err="failed to get container status \"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c\": rpc error: code = NotFound desc = could not find container \"8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c\": container with ID starting with 8b138d68398a550ad638e9d11642e2e2069f1c4af14d4a3a7acea63efbf1b99c not found: ID does not exist" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094685 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvvxk\" (UniqueName: \"kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" 
Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094761 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094817 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094830 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094846 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.094906 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197209 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197318 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197343 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197365 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197455 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data\") pod \"nova-api-0\" (UID: 
\"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.197534 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvvxk\" (UniqueName: \"kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.198305 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.202776 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.205506 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.207600 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.212324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.214645 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvvxk\" (UniqueName: \"kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk\") pod \"nova-api-0\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.312652 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.380964 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.383479 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.847363 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.931457 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.933106 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerStarted","Data":"6138443c3c93d2e29fec546f6caefdf6b236dd27aeaf409d229e54674bc34b73"} Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.940442 4868 generic.go:334] "Generic (PLEG): container finished" podID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerID="f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe" exitCode=0 Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.940542 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerDied","Data":"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe"} Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.940588 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5","Type":"ContainerDied","Data":"608780fdd43b7946e6e47899f5522f56b7c747bf39ad99bb2181b6d193327e1a"} Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.940614 4868 scope.go:117] "RemoveContainer" containerID="9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.940760 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:14:01 crc kubenswrapper[4868]: I1003 13:14:01.992913 4868 scope.go:117] "RemoveContainer" containerID="0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.021952 4868 scope.go:117] "RemoveContainer" containerID="f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.047672 4868 scope.go:117] "RemoveContainer" containerID="2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.070332 4868 scope.go:117] "RemoveContainer" containerID="9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.070777 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8\": container with ID starting with 9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8 not found: ID does not exist" containerID="9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.070813 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8"} err="failed to get container status \"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8\": rpc error: code = NotFound desc = could not find container \"9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8\": container with ID starting with 9239374339ca23e546dbd7c7b2b3e23d4428d1d041c593307259369be1e2aed8 not found: ID does not exist" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.070840 4868 scope.go:117] "RemoveContainer" containerID="0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 
13:14:02.071310 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89\": container with ID starting with 0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89 not found: ID does not exist" containerID="0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.071334 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89"} err="failed to get container status \"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89\": rpc error: code = NotFound desc = could not find container \"0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89\": container with ID starting with 0dfb54f022c10013c70ca9af7e2b026c61b56caccf67e40a17e84c2a1b646b89 not found: ID does not exist" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.071349 4868 scope.go:117] "RemoveContainer" containerID="f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.071625 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe\": container with ID starting with f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe not found: ID does not exist" containerID="f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.071644 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe"} err="failed to get container status \"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe\": rpc error: code = NotFound desc = could not find container \"f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe\": container with ID starting with f84cf2aa8a65ad7e5494ebb712c4758de506b9d6e4b6493fe1d24e8f9576babe not found: ID does not exist" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.071662 4868 scope.go:117] "RemoveContainer" containerID="2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.071868 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d\": container with ID starting with 2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d not found: ID does not exist" containerID="2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.071894 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d"} err="failed to get container status \"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d\": rpc error: code = NotFound desc = could not find container \"2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d\": container with ID starting with 2ce1a26691481184c27ad0ab9990185c805641c1b1dd69b5447f37d8e4a4470d not found: ID does not exist" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.117213 4868 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.117304 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.117382 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.117413 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.117503 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.120789 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.120846 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.120938 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p7wmn\" (UniqueName: \"kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn\") pod \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\" (UID: \"aa2f223a-b08d-4028-b8f9-1e5e959cdcf5\") " Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.121257 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.122264 4868 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-log-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.122585 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.124003 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts" (OuterVolumeSpecName: "scripts") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.162418 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn" (OuterVolumeSpecName: "kube-api-access-p7wmn") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "kube-api-access-p7wmn". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.176085 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.203209 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "ceilometer-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.224000 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.224035 4868 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.224047 4868 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.224143 4868 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-run-httpd\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.224157 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p7wmn\" (UniqueName: \"kubernetes.io/projected/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-kube-api-access-p7wmn\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.229594 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.272199 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data" (OuterVolumeSpecName: "config-data") pod "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" (UID: "aa2f223a-b08d-4028-b8f9-1e5e959cdcf5"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.326172 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.326208 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.403311 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.403341 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.555005 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2da605b-813a-4738-bedf-b858cb223fd8" path="/var/lib/kubelet/pods/f2da605b-813a-4738-bedf-b858cb223fd8/volumes" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.595908 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.618779 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.634567 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.635212 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="sg-core" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.635292 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="sg-core" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.635382 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-notification-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.635464 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-notification-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.635535 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-central-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.635589 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-central-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: E1003 13:14:02.635651 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.635722 4868 state_mem.go:107] "Deleted CPUSet 
assignment" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.635948 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-notification-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.636018 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="ceilometer-central-agent" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.636117 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="proxy-httpd" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.636249 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" containerName="sg-core" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.638356 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.641336 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.641823 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.642110 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.645744 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.737825 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-scripts\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738189 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-log-httpd\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738300 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49wxq\" (UniqueName: \"kubernetes.io/projected/b55a39cc-8088-4a92-9976-b45a5e69ffd6-kube-api-access-49wxq\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738415 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738504 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-run-httpd\") pod \"ceilometer-0\" (UID: 
\"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738588 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-config-data\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738671 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.738748 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841452 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841559 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-run-httpd\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841613 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-config-data\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841640 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841686 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841915 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-scripts\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841940 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-log-httpd\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.841996 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49wxq\" (UniqueName: \"kubernetes.io/projected/b55a39cc-8088-4a92-9976-b45a5e69ffd6-kube-api-access-49wxq\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.843091 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-log-httpd\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.843744 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b55a39cc-8088-4a92-9976-b45a5e69ffd6-run-httpd\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.847632 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.848490 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-scripts\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.848622 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-config-data\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.849239 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.849277 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b55a39cc-8088-4a92-9976-b45a5e69ffd6-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.863893 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49wxq\" (UniqueName: \"kubernetes.io/projected/b55a39cc-8088-4a92-9976-b45a5e69ffd6-kube-api-access-49wxq\") pod \"ceilometer-0\" (UID: \"b55a39cc-8088-4a92-9976-b45a5e69ffd6\") " pod="openstack/ceilometer-0" Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.952981 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerStarted","Data":"4f5f148aeef9cf7c2649bf41561e25347ec254b068876892ee87945467f205a7"} Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.953331 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerStarted","Data":"19df6d71d80daf92f9a4cd4e608ff8fca22f8509c207cb6d659597248ad55c8c"} Oct 03 13:14:02 crc kubenswrapper[4868]: I1003 13:14:02.956806 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Oct 03 13:14:03 crc kubenswrapper[4868]: I1003 13:14:03.021763 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=3.021729893 podStartE2EDuration="3.021729893s" podCreationTimestamp="2025-10-03 13:14:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:14:02.978141523 +0000 UTC m=+1439.187990609" watchObservedRunningTime="2025-10-03 13:14:03.021729893 +0000 UTC m=+1439.231578949" Oct 03 13:14:03 crc kubenswrapper[4868]: I1003 13:14:03.478107 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Oct 03 13:14:03 crc kubenswrapper[4868]: W1003 13:14:03.480400 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb55a39cc_8088_4a92_9976_b45a5e69ffd6.slice/crio-9f4e4633cc8fc17599dc8217f630596280a52e3ded8ab17989e1fc8aedbcb2a9 WatchSource:0}: Error finding container 9f4e4633cc8fc17599dc8217f630596280a52e3ded8ab17989e1fc8aedbcb2a9: Status 404 returned error can't find the container with id 9f4e4633cc8fc17599dc8217f630596280a52e3ded8ab17989e1fc8aedbcb2a9 Oct 03 13:14:03 crc kubenswrapper[4868]: I1003 13:14:03.969371 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"9f4e4633cc8fc17599dc8217f630596280a52e3ded8ab17989e1fc8aedbcb2a9"} Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.511339 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.520990 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.586991 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa2f223a-b08d-4028-b8f9-1e5e959cdcf5" path="/var/lib/kubelet/pods/aa2f223a-b08d-4028-b8f9-1e5e959cdcf5/volumes" Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.587762 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.587997 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="dnsmasq-dns" containerID="cri-o://af51c0ecbcb110c663097779aedb7069b6ef059fcc735b157c862f59848c0419" gracePeriod=10 Oct 03 13:14:04 crc kubenswrapper[4868]: I1003 13:14:04.590264 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.002092 4868 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"2cb42674cc0cd9bc2b517422fd7c505e2cdb4fcdac6777cd3e74fd883823fd68"} Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.002463 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"0b51658f86f057f8b3230e295d2df222b4a124f83a02c49e0e2fb19b7df0623a"} Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.004289 4868 generic.go:334] "Generic (PLEG): container finished" podID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerID="af51c0ecbcb110c663097779aedb7069b6ef059fcc735b157c862f59848c0419" exitCode=0 Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.005378 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" event={"ID":"27f11be0-68b7-40ed-8722-96cbcc1119d0","Type":"ContainerDied","Data":"af51c0ecbcb110c663097779aedb7069b6ef059fcc735b157c862f59848c0419"} Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.025708 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.268351 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-n75ck"] Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.276300 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.280679 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.280860 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.285708 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n75ck"] Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.296961 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.394818 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qzgjh\" (UniqueName: \"kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.394908 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.394953 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.396221 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.396383 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.396452 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0\") pod \"27f11be0-68b7-40ed-8722-96cbcc1119d0\" (UID: \"27f11be0-68b7-40ed-8722-96cbcc1119d0\") " Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.397048 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.397110 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.397270 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6lx7\" (UniqueName: \"kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.397416 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.405956 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh" (OuterVolumeSpecName: "kube-api-access-qzgjh") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "kube-api-access-qzgjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.450249 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config" (OuterVolumeSpecName: "config") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.455842 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.467443 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.494581 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500252 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6lx7\" (UniqueName: \"kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500350 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500427 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500446 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500529 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500541 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500555 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qzgjh\" (UniqueName: \"kubernetes.io/projected/27f11be0-68b7-40ed-8722-96cbcc1119d0-kube-api-access-qzgjh\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500568 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.500576 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.503558 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "27f11be0-68b7-40ed-8722-96cbcc1119d0" (UID: "27f11be0-68b7-40ed-8722-96cbcc1119d0"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.505140 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.506433 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.508459 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.530751 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6lx7\" (UniqueName: \"kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7\") pod \"nova-cell1-cell-mapping-n75ck\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.609382 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/27f11be0-68b7-40ed-8722-96cbcc1119d0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:05 crc kubenswrapper[4868]: I1003 13:14:05.623412 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.014723 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" event={"ID":"27f11be0-68b7-40ed-8722-96cbcc1119d0","Type":"ContainerDied","Data":"772bb2a6fa15f50e192f51e5ad2697d4f99020783bd28a2546c9b5b0b58459b5"} Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.014818 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-xx55n" Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.015324 4868 scope.go:117] "RemoveContainer" containerID="af51c0ecbcb110c663097779aedb7069b6ef059fcc735b157c862f59848c0419" Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.026612 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"93af2ef7c79dd3f4e95f4819fb2f08a6099cea5710d1490bf9a7a6611ab7a6b9"} Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.051215 4868 scope.go:117] "RemoveContainer" containerID="7acd3819d427feb1ae6cb77e2164b79358e9594bf61655151164bfeec8251bfb" Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.063148 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.071232 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-xx55n"] Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.129497 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-n75ck"] Oct 03 13:14:06 crc kubenswrapper[4868]: I1003 13:14:06.557272 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" path="/var/lib/kubelet/pods/27f11be0-68b7-40ed-8722-96cbcc1119d0/volumes" Oct 03 13:14:07 crc kubenswrapper[4868]: I1003 13:14:07.037261 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n75ck" event={"ID":"70a93443-6a9b-42e8-8c7d-c8df6560e009","Type":"ContainerStarted","Data":"216306e42fbd12051befacf7602b05e22d18cec292b4bc8dbd03af040c3a212d"} Oct 03 13:14:07 crc kubenswrapper[4868]: I1003 13:14:07.037327 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n75ck" event={"ID":"70a93443-6a9b-42e8-8c7d-c8df6560e009","Type":"ContainerStarted","Data":"597a174492a2b7aa07f874c6e9d2a22fffce17405714da130b17385258e1fa05"} Oct 03 13:14:07 crc kubenswrapper[4868]: I1003 13:14:07.067030 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-n75ck" podStartSLOduration=2.06699099 podStartE2EDuration="2.06699099s" podCreationTimestamp="2025-10-03 13:14:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:14:07.06139114 +0000 UTC m=+1443.271240206" watchObservedRunningTime="2025-10-03 13:14:07.06699099 +0000 UTC m=+1443.276840056" Oct 03 13:14:08 crc kubenswrapper[4868]: I1003 13:14:08.058233 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"d5222ee0d0ddbf4cc408b864180a24f00af5665508cb8af01920488d647c6412"} Oct 03 13:14:08 crc kubenswrapper[4868]: I1003 13:14:08.059153 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Oct 03 13:14:08 crc kubenswrapper[4868]: I1003 13:14:08.089983 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.537610124 podStartE2EDuration="6.089956787s" podCreationTimestamp="2025-10-03 13:14:02 +0000 UTC" firstStartedPulling="2025-10-03 13:14:03.483237425 +0000 UTC m=+1439.693086491" lastFinishedPulling="2025-10-03 13:14:07.035584088 +0000 UTC 
m=+1443.245433154" observedRunningTime="2025-10-03 13:14:08.089427113 +0000 UTC m=+1444.299276299" watchObservedRunningTime="2025-10-03 13:14:08.089956787 +0000 UTC m=+1444.299805863" Oct 03 13:14:11 crc kubenswrapper[4868]: I1003 13:14:11.313610 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:14:11 crc kubenswrapper[4868]: I1003 13:14:11.314901 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Oct 03 13:14:11 crc kubenswrapper[4868]: I1003 13:14:11.385911 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:14:11 crc kubenswrapper[4868]: I1003 13:14:11.387811 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:14:11 crc kubenswrapper[4868]: I1003 13:14:11.398315 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:14:12 crc kubenswrapper[4868]: I1003 13:14:12.095988 4868 generic.go:334] "Generic (PLEG): container finished" podID="70a93443-6a9b-42e8-8c7d-c8df6560e009" containerID="216306e42fbd12051befacf7602b05e22d18cec292b4bc8dbd03af040c3a212d" exitCode=0 Oct 03 13:14:12 crc kubenswrapper[4868]: I1003 13:14:12.096454 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n75ck" event={"ID":"70a93443-6a9b-42e8-8c7d-c8df6560e009","Type":"ContainerDied","Data":"216306e42fbd12051befacf7602b05e22d18cec292b4bc8dbd03af040c3a212d"} Oct 03 13:14:12 crc kubenswrapper[4868]: I1003 13:14:12.102499 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:14:12 crc kubenswrapper[4868]: I1003 13:14:12.329211 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:14:12 crc kubenswrapper[4868]: I1003 13:14:12.329543 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.198:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.460615 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.601928 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts\") pod \"70a93443-6a9b-42e8-8c7d-c8df6560e009\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.601971 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle\") pod \"70a93443-6a9b-42e8-8c7d-c8df6560e009\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.602492 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data\") pod \"70a93443-6a9b-42e8-8c7d-c8df6560e009\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.602624 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z6lx7\" (UniqueName: \"kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7\") pod \"70a93443-6a9b-42e8-8c7d-c8df6560e009\" (UID: \"70a93443-6a9b-42e8-8c7d-c8df6560e009\") " Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.607846 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts" (OuterVolumeSpecName: "scripts") pod "70a93443-6a9b-42e8-8c7d-c8df6560e009" (UID: "70a93443-6a9b-42e8-8c7d-c8df6560e009"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.608556 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7" (OuterVolumeSpecName: "kube-api-access-z6lx7") pod "70a93443-6a9b-42e8-8c7d-c8df6560e009" (UID: "70a93443-6a9b-42e8-8c7d-c8df6560e009"). InnerVolumeSpecName "kube-api-access-z6lx7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.636657 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "70a93443-6a9b-42e8-8c7d-c8df6560e009" (UID: "70a93443-6a9b-42e8-8c7d-c8df6560e009"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.650408 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data" (OuterVolumeSpecName: "config-data") pod "70a93443-6a9b-42e8-8c7d-c8df6560e009" (UID: "70a93443-6a9b-42e8-8c7d-c8df6560e009"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.706802 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.706852 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z6lx7\" (UniqueName: \"kubernetes.io/projected/70a93443-6a9b-42e8-8c7d-c8df6560e009-kube-api-access-z6lx7\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.706865 4868 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-scripts\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:13 crc kubenswrapper[4868]: I1003 13:14:13.706878 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/70a93443-6a9b-42e8-8c7d-c8df6560e009-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.122369 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-n75ck" event={"ID":"70a93443-6a9b-42e8-8c7d-c8df6560e009","Type":"ContainerDied","Data":"597a174492a2b7aa07f874c6e9d2a22fffce17405714da130b17385258e1fa05"} Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.122436 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="597a174492a2b7aa07f874c6e9d2a22fffce17405714da130b17385258e1fa05" Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.122457 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-n75ck" Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.313681 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.314243 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerName="nova-scheduler-scheduler" containerID="cri-o://bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" gracePeriod=30 Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.399553 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.399954 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-log" containerID="cri-o://19df6d71d80daf92f9a4cd4e608ff8fca22f8509c207cb6d659597248ad55c8c" gracePeriod=30 Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.400038 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-api" containerID="cri-o://4f5f148aeef9cf7c2649bf41561e25347ec254b068876892ee87945467f205a7" gracePeriod=30 Oct 03 13:14:14 crc kubenswrapper[4868]: I1003 13:14:14.416029 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:14:15 crc kubenswrapper[4868]: I1003 13:14:15.137327 4868 generic.go:334] "Generic (PLEG): container finished" podID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" 
containerID="19df6d71d80daf92f9a4cd4e608ff8fca22f8509c207cb6d659597248ad55c8c" exitCode=143 Oct 03 13:14:15 crc kubenswrapper[4868]: I1003 13:14:15.137508 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerDied","Data":"19df6d71d80daf92f9a4cd4e608ff8fca22f8509c207cb6d659597248ad55c8c"} Oct 03 13:14:15 crc kubenswrapper[4868]: I1003 13:14:15.138099 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" containerID="cri-o://e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1" gracePeriod=30 Oct 03 13:14:15 crc kubenswrapper[4868]: I1003 13:14:15.138210 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" containerID="cri-o://e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1" gracePeriod=30 Oct 03 13:14:16 crc kubenswrapper[4868]: I1003 13:14:16.150853 4868 generic.go:334] "Generic (PLEG): container finished" podID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerID="e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1" exitCode=143 Oct 03 13:14:16 crc kubenswrapper[4868]: I1003 13:14:16.150916 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerDied","Data":"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1"} Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.072760 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.074231 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="init" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.074251 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="init" Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.074265 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70a93443-6a9b-42e8-8c7d-c8df6560e009" containerName="nova-manage" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.074272 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="70a93443-6a9b-42e8-8c7d-c8df6560e009" containerName="nova-manage" Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.074309 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="dnsmasq-dns" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.074317 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="dnsmasq-dns" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.074586 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="70a93443-6a9b-42e8-8c7d-c8df6560e009" containerName="nova-manage" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.074601 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="27f11be0-68b7-40ed-8722-96cbcc1119d0" containerName="dnsmasq-dns" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.076519 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.094564 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.193839 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.193956 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7225r\" (UniqueName: \"kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.194206 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.195081 4868 generic.go:334] "Generic (PLEG): container finished" podID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerID="4f5f148aeef9cf7c2649bf41561e25347ec254b068876892ee87945467f205a7" exitCode=0 Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.195125 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerDied","Data":"4f5f148aeef9cf7c2649bf41561e25347ec254b068876892ee87945467f205a7"} Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.290433 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:41518->10.217.0.195:8775: read: connection reset by peer" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.290549 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.195:8775/\": read tcp 10.217.0.2:41512->10.217.0.195:8775: read: connection reset by peer" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.296259 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.296387 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" 
Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.296535 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7225r\" (UniqueName: \"kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.297638 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.297983 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.325489 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7225r\" (UniqueName: \"kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r\") pod \"redhat-operators-cj6pv\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.402925 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.604126 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.703884 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.703997 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvvxk\" (UniqueName: \"kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.704074 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.704154 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.704213 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.704253 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs\") pod \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\" (UID: \"09a94677-e9df-4e5e-9a4a-3f07ec0f5944\") " Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.705970 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs" (OuterVolumeSpecName: "logs") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.729680 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk" (OuterVolumeSpecName: "kube-api-access-rvvxk") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "kube-api-access-rvvxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.750418 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.770289 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data" (OuterVolumeSpecName: "config-data") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.809779 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.809834 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.809850 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvvxk\" (UniqueName: \"kubernetes.io/projected/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-kube-api-access-rvvxk\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.809865 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.816828 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.898113 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "09a94677-e9df-4e5e-9a4a-3f07ec0f5944" (UID: "09a94677-e9df-4e5e-9a4a-3f07ec0f5944"). InnerVolumeSpecName "public-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.913115 4868 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: I1003 13:14:18.913171 4868 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/09a94677-e9df-4e5e-9a4a-3f07ec0f5944-public-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.942318 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03 is running failed: container process not found" containerID="bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.943986 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03 is running failed: container process not found" containerID="bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.944405 4868 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03 is running failed: container process not found" containerID="bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Oct 03 13:14:18 crc kubenswrapper[4868]: E1003 13:14:18.944446 4868 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03 is running failed: container process not found" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerName="nova-scheduler-scheduler" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.050802 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.071684 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.218919 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data\") pod \"feb416aa-1925-4717-b68b-1e0edcf34b85\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.219071 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs\") pod \"feb416aa-1925-4717-b68b-1e0edcf34b85\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.219264 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2dsnq\" (UniqueName: \"kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq\") pod \"feb416aa-1925-4717-b68b-1e0edcf34b85\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.219292 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle\") pod \"feb416aa-1925-4717-b68b-1e0edcf34b85\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.219350 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs\") pod \"feb416aa-1925-4717-b68b-1e0edcf34b85\" (UID: \"feb416aa-1925-4717-b68b-1e0edcf34b85\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.220566 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs" (OuterVolumeSpecName: "logs") pod "feb416aa-1925-4717-b68b-1e0edcf34b85" (UID: "feb416aa-1925-4717-b68b-1e0edcf34b85"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.225975 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq" (OuterVolumeSpecName: "kube-api-access-2dsnq") pod "feb416aa-1925-4717-b68b-1e0edcf34b85" (UID: "feb416aa-1925-4717-b68b-1e0edcf34b85"). InnerVolumeSpecName "kube-api-access-2dsnq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.256532 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"09a94677-e9df-4e5e-9a4a-3f07ec0f5944","Type":"ContainerDied","Data":"6138443c3c93d2e29fec546f6caefdf6b236dd27aeaf409d229e54674bc34b73"} Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.257142 4868 scope.go:117] "RemoveContainer" containerID="4f5f148aeef9cf7c2649bf41561e25347ec254b068876892ee87945467f205a7" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.257387 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.278406 4868 generic.go:334] "Generic (PLEG): container finished" podID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerID="e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1" exitCode=0 Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.278505 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerDied","Data":"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1"} Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.278555 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"feb416aa-1925-4717-b68b-1e0edcf34b85","Type":"ContainerDied","Data":"4ae5f63a298ce4adf38c45a6613c355ea39f768872bd9eee1b229d1514a7639e"} Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.278646 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.281530 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerStarted","Data":"c17804ceabec6f69726c4a9761081907ac32ffc9f731470954c21ce1f0641856"} Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.286616 4868 generic.go:334] "Generic (PLEG): container finished" podID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerID="bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" exitCode=0 Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.286679 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b44f2cd-df8b-45f0-ada0-3e8fb6093726","Type":"ContainerDied","Data":"bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03"} Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.314466 4868 scope.go:117] "RemoveContainer" containerID="19df6d71d80daf92f9a4cd4e608ff8fca22f8509c207cb6d659597248ad55c8c" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.327017 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2dsnq\" (UniqueName: \"kubernetes.io/projected/feb416aa-1925-4717-b68b-1e0edcf34b85-kube-api-access-2dsnq\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.327148 4868 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/feb416aa-1925-4717-b68b-1e0edcf34b85-logs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.358262 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.395603 4868 scope.go:117] "RemoveContainer" containerID="e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.396487 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.420257 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.421024 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-log" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421042 4868 
state_mem.go:107] "Deleted CPUSet assignment" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-log" Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.421079 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421086 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.421093 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421101 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.421125 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-api" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421132 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-api" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421378 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-api" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421425 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-log" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421440 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" containerName="nova-api-log" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.421460 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" containerName="nova-metadata-metadata" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.422726 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.432234 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.432434 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.432580 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.444014 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.454823 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data" (OuterVolumeSpecName: "config-data") pod "feb416aa-1925-4717-b68b-1e0edcf34b85" (UID: "feb416aa-1925-4717-b68b-1e0edcf34b85"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.471023 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "feb416aa-1925-4717-b68b-1e0edcf34b85" (UID: "feb416aa-1925-4717-b68b-1e0edcf34b85"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.479854 4868 scope.go:117] "RemoveContainer" containerID="e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.492221 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.493141 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "feb416aa-1925-4717-b68b-1e0edcf34b85" (UID: "feb416aa-1925-4717-b68b-1e0edcf34b85"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535176 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-public-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535256 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pdjzn\" (UniqueName: \"kubernetes.io/projected/f1d25601-e520-4e7b-99e2-d35e6568d838-kube-api-access-pdjzn\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535341 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535432 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535528 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-config-data\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535597 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d25601-e520-4e7b-99e2-d35e6568d838-logs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 
03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535659 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535671 4868 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.535680 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/feb416aa-1925-4717-b68b-1e0edcf34b85-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.550798 4868 scope.go:117] "RemoveContainer" containerID="e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1" Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.551727 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1\": container with ID starting with e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1 not found: ID does not exist" containerID="e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.551764 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1"} err="failed to get container status \"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1\": rpc error: code = NotFound desc = could not find container \"e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1\": container with ID starting with e603c6f3027effb43074b87e64bf20ecf84fcf9e2323a9b9096f034dc28981a1 not found: ID does not exist" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.551787 4868 scope.go:117] "RemoveContainer" containerID="e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1" Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.556499 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1\": container with ID starting with e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1 not found: ID does not exist" containerID="e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.556581 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1"} err="failed to get container status \"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1\": rpc error: code = NotFound desc = could not find container \"e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1\": container with ID starting with e59fe38060629a948afe0822d2451ec57be479acbf63eb3671cd49415808d9f1 not found: ID does not exist" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.626130 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.633562 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/nova-metadata-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.637957 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data\") pod \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638129 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle\") pod \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638354 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8l9r9\" (UniqueName: \"kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9\") pod \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\" (UID: \"8b44f2cd-df8b-45f0-ada0-3e8fb6093726\") " Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638714 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-public-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638789 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pdjzn\" (UniqueName: \"kubernetes.io/projected/f1d25601-e520-4e7b-99e2-d35e6568d838-kube-api-access-pdjzn\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638870 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.638976 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.639046 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-config-data\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.639124 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d25601-e520-4e7b-99e2-d35e6568d838-logs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.642132 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f1d25601-e520-4e7b-99e2-d35e6568d838-logs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 
13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.650615 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-public-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.651399 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.652016 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-internal-tls-certs\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.653374 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f1d25601-e520-4e7b-99e2-d35e6568d838-config-data\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.655681 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9" (OuterVolumeSpecName: "kube-api-access-8l9r9") pod "8b44f2cd-df8b-45f0-ada0-3e8fb6093726" (UID: "8b44f2cd-df8b-45f0-ada0-3e8fb6093726"). InnerVolumeSpecName "kube-api-access-8l9r9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.672563 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: E1003 13:14:19.673247 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerName="nova-scheduler-scheduler" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.673276 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerName="nova-scheduler-scheduler" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.673779 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" containerName="nova-scheduler-scheduler" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.681746 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pdjzn\" (UniqueName: \"kubernetes.io/projected/f1d25601-e520-4e7b-99e2-d35e6568d838-kube-api-access-pdjzn\") pod \"nova-api-0\" (UID: \"f1d25601-e520-4e7b-99e2-d35e6568d838\") " pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.699077 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.703730 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.707371 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.740106 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.744698 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8l9r9\" (UniqueName: \"kubernetes.io/projected/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-kube-api-access-8l9r9\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.753805 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8b44f2cd-df8b-45f0-ada0-3e8fb6093726" (UID: "8b44f2cd-df8b-45f0-ada0-3e8fb6093726"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.778528 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.802824 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data" (OuterVolumeSpecName: "config-data") pod "8b44f2cd-df8b-45f0-ada0-3e8fb6093726" (UID: "8b44f2cd-df8b-45f0-ada0-3e8fb6093726"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.848657 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsl4d\" (UniqueName: \"kubernetes.io/projected/b3e36433-b5ce-4428-ad2e-1bf31738b016-kube-api-access-nsl4d\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.849878 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.850031 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.850101 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-config-data\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.850253 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3e36433-b5ce-4428-ad2e-1bf31738b016-logs\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.850772 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.850841 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8b44f2cd-df8b-45f0-ada0-3e8fb6093726-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.953973 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsl4d\" (UniqueName: \"kubernetes.io/projected/b3e36433-b5ce-4428-ad2e-1bf31738b016-kube-api-access-nsl4d\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.954173 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.954252 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: 
\"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.954285 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-config-data\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.954378 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3e36433-b5ce-4428-ad2e-1bf31738b016-logs\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.955079 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b3e36433-b5ce-4428-ad2e-1bf31738b016-logs\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.962768 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.964841 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.967723 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b3e36433-b5ce-4428-ad2e-1bf31738b016-config-data\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:19 crc kubenswrapper[4868]: I1003 13:14:19.997841 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsl4d\" (UniqueName: \"kubernetes.io/projected/b3e36433-b5ce-4428-ad2e-1bf31738b016-kube-api-access-nsl4d\") pod \"nova-metadata-0\" (UID: \"b3e36433-b5ce-4428-ad2e-1bf31738b016\") " pod="openstack/nova-metadata-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.022209 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.306614 4868 generic.go:334] "Generic (PLEG): container finished" podID="f5e88420-0ee2-4452-8164-2576da15205a" containerID="8ae19eb4c27c33366b9964e6c6ad318d416b2ad13830dccc270aecc45a490be5" exitCode=0 Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.306848 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerDied","Data":"8ae19eb4c27c33366b9964e6c6ad318d416b2ad13830dccc270aecc45a490be5"} Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.312793 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"8b44f2cd-df8b-45f0-ada0-3e8fb6093726","Type":"ContainerDied","Data":"8f6785f7830606da0865d565a9c5d61a2e251c15d745845c44c6c90926f5dadf"} Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.312862 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.313065 4868 scope.go:117] "RemoveContainer" containerID="bb8d72dcae653c3396f00b86764282b2fff24cb99149c323faf28579257f8d03" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.364280 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.404523 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.419950 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.422416 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.425100 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.450770 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: W1003 13:14:20.456432 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf1d25601_e520_4e7b_99e2_d35e6568d838.slice/crio-200445053c9cef2f206a432daf9d7e8a07fb260914351ce2e778798e66035244 WatchSource:0}: Error finding container 200445053c9cef2f206a432daf9d7e8a07fb260914351ce2e778798e66035244: Status 404 returned error can't find the container with id 200445053c9cef2f206a432daf9d7e8a07fb260914351ce2e778798e66035244 Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.460066 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.558306 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09a94677-e9df-4e5e-9a4a-3f07ec0f5944" path="/var/lib/kubelet/pods/09a94677-e9df-4e5e-9a4a-3f07ec0f5944/volumes" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.558908 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8b44f2cd-df8b-45f0-ada0-3e8fb6093726" path="/var/lib/kubelet/pods/8b44f2cd-df8b-45f0-ada0-3e8fb6093726/volumes" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.559534 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feb416aa-1925-4717-b68b-1e0edcf34b85" path="/var/lib/kubelet/pods/feb416aa-1925-4717-b68b-1e0edcf34b85/volumes" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.568192 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-config-data\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.568237 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.568307 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sc87\" (UniqueName: \"kubernetes.io/projected/48e1d58c-c725-48c5-99f2-b3ce568d1136-kube-api-access-2sc87\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.598961 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Oct 03 13:14:20 crc kubenswrapper[4868]: W1003 13:14:20.607607 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3e36433_b5ce_4428_ad2e_1bf31738b016.slice/crio-4c2ef06d39fa14d0eaca78a413d9be2ed7c7d190fabe18f1d805b9092bebff96 WatchSource:0}: Error finding container 4c2ef06d39fa14d0eaca78a413d9be2ed7c7d190fabe18f1d805b9092bebff96: Status 
404 returned error can't find the container with id 4c2ef06d39fa14d0eaca78a413d9be2ed7c7d190fabe18f1d805b9092bebff96 Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.672790 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-config-data\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.673297 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.673502 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sc87\" (UniqueName: \"kubernetes.io/projected/48e1d58c-c725-48c5-99f2-b3ce568d1136-kube-api-access-2sc87\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.683964 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-config-data\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.684330 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48e1d58c-c725-48c5-99f2-b3ce568d1136-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.699811 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sc87\" (UniqueName: \"kubernetes.io/projected/48e1d58c-c725-48c5-99f2-b3ce568d1136-kube-api-access-2sc87\") pod \"nova-scheduler-0\" (UID: \"48e1d58c-c725-48c5-99f2-b3ce568d1136\") " pod="openstack/nova-scheduler-0" Oct 03 13:14:20 crc kubenswrapper[4868]: I1003 13:14:20.749371 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.328857 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3e36433-b5ce-4428-ad2e-1bf31738b016","Type":"ContainerStarted","Data":"37afdfb97876ffd6b755f47c73c71a815d4f91abf9db448831920098635d2dc1"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.329360 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3e36433-b5ce-4428-ad2e-1bf31738b016","Type":"ContainerStarted","Data":"c610b1b33c1ad4bb2029474d694dcabcd932dd88f59b042f5ee40ec7be6f781b"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.329411 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"b3e36433-b5ce-4428-ad2e-1bf31738b016","Type":"ContainerStarted","Data":"4c2ef06d39fa14d0eaca78a413d9be2ed7c7d190fabe18f1d805b9092bebff96"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.332490 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d25601-e520-4e7b-99e2-d35e6568d838","Type":"ContainerStarted","Data":"9d7fea035cffe8bab50a5cd8b47b4710c120c0862e245aeb4ec048cb8dfbb63a"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.332569 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d25601-e520-4e7b-99e2-d35e6568d838","Type":"ContainerStarted","Data":"71848bba6d4c542d6da2a8d99c9c07098c453386817fc0691235355b5f2d0b28"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.332583 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f1d25601-e520-4e7b-99e2-d35e6568d838","Type":"ContainerStarted","Data":"200445053c9cef2f206a432daf9d7e8a07fb260914351ce2e778798e66035244"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:21.363226 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.363193647 podStartE2EDuration="2.363193647s" podCreationTimestamp="2025-10-03 13:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:14:21.358367158 +0000 UTC m=+1457.568216244" watchObservedRunningTime="2025-10-03 13:14:21.363193647 +0000 UTC m=+1457.573042713" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.346384 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerStarted","Data":"a5db6484126bfff14d5aa14b671f739448b8f34f7ff1726749d69791d2e8caed"} Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.369044 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.369013055 podStartE2EDuration="3.369013055s" podCreationTimestamp="2025-10-03 13:14:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:14:22.363676792 +0000 UTC m=+1458.573525878" watchObservedRunningTime="2025-10-03 13:14:22.369013055 +0000 UTC m=+1458.578862121" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.447553 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-j4clk"] Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.449966 4868 util.go:30] "No sandbox for pod 
can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.465812 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j4clk"] Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.529262 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.529547 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.529755 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25wbm\" (UniqueName: \"kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.631257 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.631383 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.631479 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25wbm\" (UniqueName: \"kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.632096 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.632151 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.659660 4868 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25wbm\" (UniqueName: \"kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm\") pod \"community-operators-j4clk\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") " pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:22 crc kubenswrapper[4868]: I1003 13:14:22.775535 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.167061 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.185241 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-j4clk"] Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.361995 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"48e1d58c-c725-48c5-99f2-b3ce568d1136","Type":"ContainerStarted","Data":"1350b924a5a8e08553feba6da92477a3531ffbc3a81d12e500af45fac94778c3"} Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.363091 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerStarted","Data":"ee717a08f929cf255c575cf4c6ce60f6f9b2d4a57d0820cf01526286424a2f9f"} Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.365258 4868 generic.go:334] "Generic (PLEG): container finished" podID="f5e88420-0ee2-4452-8164-2576da15205a" containerID="a5db6484126bfff14d5aa14b671f739448b8f34f7ff1726749d69791d2e8caed" exitCode=0 Oct 03 13:14:23 crc kubenswrapper[4868]: I1003 13:14:23.365294 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerDied","Data":"a5db6484126bfff14d5aa14b671f739448b8f34f7ff1726749d69791d2e8caed"} Oct 03 13:14:24 crc kubenswrapper[4868]: I1003 13:14:24.382884 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"48e1d58c-c725-48c5-99f2-b3ce568d1136","Type":"ContainerStarted","Data":"ff6e5817c68e38fa638587b79e54e5fbfd27187450a73d1309c04ffddc847e32"} Oct 03 13:14:24 crc kubenswrapper[4868]: I1003 13:14:24.386172 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerStarted","Data":"f7302eebfcfefe82df2b9c3f952a38fa228c596adf6c162890263519bd2fd5c2"} Oct 03 13:14:24 crc kubenswrapper[4868]: I1003 13:14:24.389132 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerStarted","Data":"65b8b9b688691c24287b010d8985b9e31ccf012725458a0775137f5489426eef"} Oct 03 13:14:24 crc kubenswrapper[4868]: I1003 13:14:24.419423 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=4.419398118 podStartE2EDuration="4.419398118s" podCreationTimestamp="2025-10-03 13:14:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:14:24.407810497 +0000 UTC m=+1460.617659573" watchObservedRunningTime="2025-10-03 13:14:24.419398118 +0000 UTC 
m=+1460.629247184"
Oct 03 13:14:24 crc kubenswrapper[4868]: I1003 13:14:24.458275 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-cj6pv" podStartSLOduration=2.906531163 podStartE2EDuration="6.458246889s" podCreationTimestamp="2025-10-03 13:14:18 +0000 UTC" firstStartedPulling="2025-10-03 13:14:20.309185148 +0000 UTC m=+1456.519034214" lastFinishedPulling="2025-10-03 13:14:23.860900874 +0000 UTC m=+1460.070749940" observedRunningTime="2025-10-03 13:14:24.449958377 +0000 UTC m=+1460.659807443" watchObservedRunningTime="2025-10-03 13:14:24.458246889 +0000 UTC m=+1460.668095955"
Oct 03 13:14:25 crc kubenswrapper[4868]: I1003 13:14:25.023394 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 13:14:25 crc kubenswrapper[4868]: I1003 13:14:25.023685 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Oct 03 13:14:25 crc kubenswrapper[4868]: I1003 13:14:25.408346 4868 generic.go:334] "Generic (PLEG): container finished" podID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerID="f7302eebfcfefe82df2b9c3f952a38fa228c596adf6c162890263519bd2fd5c2" exitCode=0
Oct 03 13:14:25 crc kubenswrapper[4868]: I1003 13:14:25.408445 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerDied","Data":"f7302eebfcfefe82df2b9c3f952a38fa228c596adf6c162890263519bd2fd5c2"}
Oct 03 13:14:25 crc kubenswrapper[4868]: I1003 13:14:25.750268 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Oct 03 13:14:27 crc kubenswrapper[4868]: I1003 13:14:27.431827 4868 generic.go:334] "Generic (PLEG): container finished" podID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerID="620a71f44697d9709808f1d37d5a357dcfd25b5a4690564dd4f1e5ce54108d52" exitCode=0
Oct 03 13:14:27 crc kubenswrapper[4868]: I1003 13:14:27.432023 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerDied","Data":"620a71f44697d9709808f1d37d5a357dcfd25b5a4690564dd4f1e5ce54108d52"}
Oct 03 13:14:28 crc kubenswrapper[4868]: I1003 13:14:28.404558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-cj6pv"
Oct 03 13:14:28 crc kubenswrapper[4868]: I1003 13:14:28.405557 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-cj6pv"
Oct 03 13:14:28 crc kubenswrapper[4868]: I1003 13:14:28.452232 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerStarted","Data":"ed5332c95608449d5144572d2fe6bdc2aa6bf10e5da512e4c5f978a26b84c555"}
Oct 03 13:14:28 crc kubenswrapper[4868]: I1003 13:14:28.482939 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-j4clk" podStartSLOduration=3.88679484 podStartE2EDuration="6.482911405s" podCreationTimestamp="2025-10-03 13:14:22 +0000 UTC" firstStartedPulling="2025-10-03 13:14:25.416447509 +0000 UTC m=+1461.626296575" lastFinishedPulling="2025-10-03 13:14:28.012564074 +0000 UTC m=+1464.222413140" observedRunningTime="2025-10-03 13:14:28.476222225 +0000 UTC m=+1464.686071301" watchObservedRunningTime="2025-10-03 13:14:28.482911405 +0000 UTC m=+1464.692760481"
Oct 03 13:14:29 crc kubenswrapper[4868]: I1003 13:14:29.465117 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-cj6pv" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="registry-server" probeResult="failure" output=<
Oct 03 13:14:29 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s
Oct 03 13:14:29 crc kubenswrapper[4868]: >
Oct 03 13:14:29 crc kubenswrapper[4868]: I1003 13:14:29.788267 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 13:14:29 crc kubenswrapper[4868]: I1003 13:14:29.788810 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.022708 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.022784 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.750358 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.783779 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.803346 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f1d25601-e520-4e7b-99e2-d35e6568d838" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:14:30 crc kubenswrapper[4868]: I1003 13:14:30.803357 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f1d25601-e520-4e7b-99e2-d35e6568d838" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.202:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:14:31 crc kubenswrapper[4868]: I1003 13:14:31.038497 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b3e36433-b5ce-4428-ad2e-1bf31738b016" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:14:31 crc kubenswrapper[4868]: I1003 13:14:31.038497 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="b3e36433-b5ce-4428-ad2e-1bf31738b016" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.203:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Oct 03 13:14:31 crc kubenswrapper[4868]: I1003 13:14:31.517135 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Oct 03 13:14:32 crc kubenswrapper[4868]: I1003 13:14:32.776171 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-j4clk"
Oct 03 13:14:32 crc kubenswrapper[4868]: I1003 13:14:32.776529 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-j4clk"
Oct 03 13:14:32 crc kubenswrapper[4868]: I1003 13:14:32.834478 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-j4clk"
Oct 03 13:14:32 crc kubenswrapper[4868]: I1003 13:14:32.968558 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Oct 03 13:14:33 crc kubenswrapper[4868]: I1003 13:14:33.605483 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-j4clk"
Oct 03 13:14:33 crc kubenswrapper[4868]: I1003 13:14:33.716646 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j4clk"]
Oct 03 13:14:35 crc kubenswrapper[4868]: I1003 13:14:35.527577 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-j4clk" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="registry-server" containerID="cri-o://ed5332c95608449d5144572d2fe6bdc2aa6bf10e5da512e4c5f978a26b84c555" gracePeriod=2
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.544246 4868 generic.go:334] "Generic (PLEG): container finished" podID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerID="ed5332c95608449d5144572d2fe6bdc2aa6bf10e5da512e4c5f978a26b84c555" exitCode=0
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.558636 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerDied","Data":"ed5332c95608449d5144572d2fe6bdc2aa6bf10e5da512e4c5f978a26b84c555"}
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.693864 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-j4clk"
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.870540 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities\") pod \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") "
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.870654 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25wbm\" (UniqueName: \"kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm\") pod \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") "
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.870701 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content\") pod \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\" (UID: \"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f\") "
Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.871340 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities" (OuterVolumeSpecName: "utilities") pod "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" (UID: "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.877456 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm" (OuterVolumeSpecName: "kube-api-access-25wbm") pod "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" (UID: "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f"). InnerVolumeSpecName "kube-api-access-25wbm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.917854 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" (UID: "b6079e02-b3d8-4d94-99d7-c5f9190b1e2f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.973480 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25wbm\" (UniqueName: \"kubernetes.io/projected/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-kube-api-access-25wbm\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.973530 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:36 crc kubenswrapper[4868]: I1003 13:14:36.973543 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.558409 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-j4clk" event={"ID":"b6079e02-b3d8-4d94-99d7-c5f9190b1e2f","Type":"ContainerDied","Data":"ee717a08f929cf255c575cf4c6ce60f6f9b2d4a57d0820cf01526286424a2f9f"} Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.558481 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-j4clk" Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.558488 4868 scope.go:117] "RemoveContainer" containerID="ed5332c95608449d5144572d2fe6bdc2aa6bf10e5da512e4c5f978a26b84c555" Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.591846 4868 scope.go:117] "RemoveContainer" containerID="620a71f44697d9709808f1d37d5a357dcfd25b5a4690564dd4f1e5ce54108d52" Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.597729 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-j4clk"] Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.613767 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-j4clk"] Oct 03 13:14:37 crc kubenswrapper[4868]: I1003 13:14:37.654538 4868 scope.go:117] "RemoveContainer" containerID="f7302eebfcfefe82df2b9c3f952a38fa228c596adf6c162890263519bd2fd5c2" Oct 03 13:14:38 crc kubenswrapper[4868]: I1003 13:14:38.452173 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:38 crc kubenswrapper[4868]: I1003 13:14:38.506037 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:38 crc kubenswrapper[4868]: I1003 13:14:38.555945 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" path="/var/lib/kubelet/pods/b6079e02-b3d8-4d94-99d7-c5f9190b1e2f/volumes" Oct 03 13:14:38 crc kubenswrapper[4868]: I1003 13:14:38.932824 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.576504 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-cj6pv" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="registry-server" containerID="cri-o://65b8b9b688691c24287b010d8985b9e31ccf012725458a0775137f5489426eef" gracePeriod=2 Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.794299 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.794671 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.795281 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.795472 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.803222 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:14:39 crc kubenswrapper[4868]: I1003 13:14:39.803349 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.028156 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.029110 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.034291 4868 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.593724 4868 generic.go:334] "Generic (PLEG): container finished" podID="f5e88420-0ee2-4452-8164-2576da15205a" containerID="65b8b9b688691c24287b010d8985b9e31ccf012725458a0775137f5489426eef" exitCode=0 Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.593836 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerDied","Data":"65b8b9b688691c24287b010d8985b9e31ccf012725458a0775137f5489426eef"} Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.617431 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.739174 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.868570 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities\") pod \"f5e88420-0ee2-4452-8164-2576da15205a\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.868743 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content\") pod \"f5e88420-0ee2-4452-8164-2576da15205a\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.869034 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7225r\" (UniqueName: \"kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r\") pod \"f5e88420-0ee2-4452-8164-2576da15205a\" (UID: \"f5e88420-0ee2-4452-8164-2576da15205a\") " Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.869820 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities" (OuterVolumeSpecName: "utilities") pod "f5e88420-0ee2-4452-8164-2576da15205a" (UID: "f5e88420-0ee2-4452-8164-2576da15205a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.871176 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.878480 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r" (OuterVolumeSpecName: "kube-api-access-7225r") pod "f5e88420-0ee2-4452-8164-2576da15205a" (UID: "f5e88420-0ee2-4452-8164-2576da15205a"). InnerVolumeSpecName "kube-api-access-7225r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.946509 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "f5e88420-0ee2-4452-8164-2576da15205a" (UID: "f5e88420-0ee2-4452-8164-2576da15205a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.973543 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7225r\" (UniqueName: \"kubernetes.io/projected/f5e88420-0ee2-4452-8164-2576da15205a-kube-api-access-7225r\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:40 crc kubenswrapper[4868]: I1003 13:14:40.973579 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/f5e88420-0ee2-4452-8164-2576da15205a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.609211 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-cj6pv" event={"ID":"f5e88420-0ee2-4452-8164-2576da15205a","Type":"ContainerDied","Data":"c17804ceabec6f69726c4a9761081907ac32ffc9f731470954c21ce1f0641856"} Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.609354 4868 scope.go:117] "RemoveContainer" containerID="65b8b9b688691c24287b010d8985b9e31ccf012725458a0775137f5489426eef" Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.610960 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-cj6pv" Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.661655 4868 scope.go:117] "RemoveContainer" containerID="a5db6484126bfff14d5aa14b671f739448b8f34f7ff1726749d69791d2e8caed" Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.665609 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.678263 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-cj6pv"] Oct 03 13:14:41 crc kubenswrapper[4868]: I1003 13:14:41.693889 4868 scope.go:117] "RemoveContainer" containerID="8ae19eb4c27c33366b9964e6c6ad318d416b2ad13830dccc270aecc45a490be5" Oct 03 13:14:42 crc kubenswrapper[4868]: I1003 13:14:42.555365 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5e88420-0ee2-4452-8164-2576da15205a" path="/var/lib/kubelet/pods/f5e88420-0ee2-4452-8164-2576da15205a/volumes" Oct 03 13:14:49 crc kubenswrapper[4868]: I1003 13:14:49.001380 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:14:50 crc kubenswrapper[4868]: I1003 13:14:50.629206 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Oct 03 13:14:53 crc kubenswrapper[4868]: I1003 13:14:53.383242 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="rabbitmq" containerID="cri-o://feeccaadeb9938a2f31874ae5d739e7389c6c44f8e0be360c4b213c568c5afdd" gracePeriod=604796 Oct 03 13:14:55 crc kubenswrapper[4868]: I1003 13:14:55.386257 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" 
podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="rabbitmq" containerID="cri-o://c7c70b0b002d4d2030b948ed263438815e14f295e91b125c61af989ec76d1fbf" gracePeriod=604796 Oct 03 13:14:57 crc kubenswrapper[4868]: I1003 13:14:57.035042 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.97:5671: connect: connection refused" Oct 03 13:14:57 crc kubenswrapper[4868]: I1003 13:14:57.093406 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: connect: connection refused" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.777046 4868 generic.go:334] "Generic (PLEG): container finished" podID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerID="feeccaadeb9938a2f31874ae5d739e7389c6c44f8e0be360c4b213c568c5afdd" exitCode=0 Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.777108 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerDied","Data":"feeccaadeb9938a2f31874ae5d739e7389c6c44f8e0be360c4b213c568c5afdd"} Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.970028 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980224 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980278 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980307 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980358 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980388 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hdcmq\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980439 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf\") pod 
\"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980483 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980512 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980543 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980573 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980612 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins\") pod \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\" (UID: \"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd\") " Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980828 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.980979 4868 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-plugins-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.981116 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.981522 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "rabbitmq-plugins". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.990288 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "persistence") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.990309 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.990298 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info" (OuterVolumeSpecName: "pod-info") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.990399 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq" (OuterVolumeSpecName: "kube-api-access-hdcmq") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "kube-api-access-hdcmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:14:59 crc kubenswrapper[4868]: I1003 13:14:59.996772 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.042153 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data" (OuterVolumeSpecName: "config-data") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084110 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084388 4868 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084481 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084545 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084599 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-config-data\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084686 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" " Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084763 4868 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-pod-info\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.084827 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hdcmq\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-kube-api-access-hdcmq\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.113444 4868 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.121139 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf" (OuterVolumeSpecName: "server-conf") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.154579 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"] Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155113 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="setup-container" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155131 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="setup-container" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155143 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155151 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155168 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155175 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155183 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="extract-content" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155190 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="extract-content" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155209 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="rabbitmq" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155216 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="rabbitmq" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155231 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="extract-utilities" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155238 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="extract-utilities" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155262 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="extract-utilities" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155270 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="extract-utilities" Oct 03 13:15:00 crc kubenswrapper[4868]: E1003 13:15:00.155286 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="extract-content" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155293 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="extract-content" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155569 4868 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f5e88420-0ee2-4452-8164-2576da15205a" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155587 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" containerName="rabbitmq" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.155600 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6079e02-b3d8-4d94-99d7-c5f9190b1e2f" containerName="registry-server" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.156419 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.161015 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.161225 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.172595 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"] Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.187426 4868 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-server-conf\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.187868 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.206901 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" (UID: "840fa4a7-3667-4e39-9d4a-3edb5c41d3fd"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.290624 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.291015 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fzzx2\" (UniqueName: \"kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.291163 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.291260 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.392823 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.393000 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fzzx2\" (UniqueName: \"kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.393046 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.394485 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.398007 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.415928 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fzzx2\" (UniqueName: \"kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2\") pod \"collect-profiles-29324955-gnv92\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.480367 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.791702 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"840fa4a7-3667-4e39-9d4a-3edb5c41d3fd","Type":"ContainerDied","Data":"9e2261bd2ea9f14b5ce473529009684ee542aa1485e35ab9fa3810848ac50fbf"} Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.792225 4868 scope.go:117] "RemoveContainer" containerID="feeccaadeb9938a2f31874ae5d739e7389c6c44f8e0be360c4b213c568c5afdd" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.791958 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.820346 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.827432 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.848505 4868 scope.go:117] "RemoveContainer" containerID="50e1b41735c10e406a5cb897fadf8e51579bbb8ae1b7b47347c387d79cde3bf1" Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.850850 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.852944 4868 util.go:30] "No sandbox for pod can be found. 
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.855846 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856175 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-rxtgr"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856333 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856536 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856643 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856757 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.856779 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie"
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.880841 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 13:15:00 crc kubenswrapper[4868]: I1003 13:15:00.985231 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"]
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.027155 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.027763 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/115a46e7-8030-4ef7-9567-252f2a2a1467-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.027870 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.028046 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tsjm\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-kube-api-access-4tsjm\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.028451 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.028512 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-server-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.028755 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.029093 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.029140 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/115a46e7-8030-4ef7-9567-252f2a2a1467-pod-info\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.029251 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.029374 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-config-data\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131382 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/115a46e7-8030-4ef7-9567-252f2a2a1467-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131485 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131545 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tsjm\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-kube-api-access-4tsjm\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131574 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131599 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-server-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131642 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131677 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131709 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/115a46e7-8030-4ef7-9567-252f2a2a1467-pod-info\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131738 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131777 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-config-data\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.131819 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.132402 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.134082 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.134239 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.134557 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-config-data\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.134868 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-server-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.135680 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/115a46e7-8030-4ef7-9567-252f2a2a1467-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.138532 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.139259 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/115a46e7-8030-4ef7-9567-252f2a2a1467-pod-info\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.139414 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.157929 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/115a46e7-8030-4ef7-9567-252f2a2a1467-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.158112 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tsjm\" (UniqueName: \"kubernetes.io/projected/115a46e7-8030-4ef7-9567-252f2a2a1467-kube-api-access-4tsjm\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.184097 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"rabbitmq-server-0\" (UID: \"115a46e7-8030-4ef7-9567-252f2a2a1467\") " pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.229626 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.733320 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"]
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.806431 4868 generic.go:334] "Generic (PLEG): container finished" podID="767b70bd-7d85-4b49-b429-1fad0a5eac9c" containerID="e928ebf1948250fd3d2727dc59548a5395912da72e1496903b1845e5a550e404" exitCode=0
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.806498 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" event={"ID":"767b70bd-7d85-4b49-b429-1fad0a5eac9c","Type":"ContainerDied","Data":"e928ebf1948250fd3d2727dc59548a5395912da72e1496903b1845e5a550e404"}
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.806529 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" event={"ID":"767b70bd-7d85-4b49-b429-1fad0a5eac9c","Type":"ContainerStarted","Data":"c0392f80f94448c56cf60f42136d4e1235f8af9fc923e800ee6de4f1fc25b421"}
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.809766 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"115a46e7-8030-4ef7-9567-252f2a2a1467","Type":"ContainerStarted","Data":"c4454a199ae265127408be61eabb4885fa4e6bccb26129d2c3be5ff9118493d3"}
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.813900 4868 generic.go:334] "Generic (PLEG): container finished" podID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerID="c7c70b0b002d4d2030b948ed263438815e14f295e91b125c61af989ec76d1fbf" exitCode=0
Oct 03 13:15:01 crc kubenswrapper[4868]: I1003 13:15:01.813959 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerDied","Data":"c7c70b0b002d4d2030b948ed263438815e14f295e91b125c61af989ec76d1fbf"}
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.133208 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256507 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqcxf\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256566 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256638 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256677 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256719 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256803 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256832 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256870 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256923 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256951 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") "
\"8e1f1515-2197-4124-83dc-382a70cd3e91\") " Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.256986 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret\") pod \"8e1f1515-2197-4124-83dc-382a70cd3e91\" (UID: \"8e1f1515-2197-4124-83dc-382a70cd3e91\") " Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.257853 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.258887 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.258986 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.262673 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.262927 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.265458 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info" (OuterVolumeSpecName: "pod-info") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.265613 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf" (OuterVolumeSpecName: "kube-api-access-mqcxf") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "kube-api-access-mqcxf". 
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.267266 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.299875 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data" (OuterVolumeSpecName: "config-data") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359332 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqcxf\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-kube-api-access-mqcxf\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359685 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359709 4868 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359718 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359727 4868 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-plugins-conf\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359738 4868 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/8e1f1515-2197-4124-83dc-382a70cd3e91-pod-info\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359746 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359754 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.359762 4868 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/8e1f1515-2197-4124-83dc-382a70cd3e91-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.372469 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf" (OuterVolumeSpecName: "server-conf") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.403774 4868 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.411614 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "8e1f1515-2197-4124-83dc-382a70cd3e91" (UID: "8e1f1515-2197-4124-83dc-382a70cd3e91"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.462117 4868 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.462200 4868 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/8e1f1515-2197-4124-83dc-382a70cd3e91-server-conf\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.462213 4868 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/8e1f1515-2197-4124-83dc-382a70cd3e91-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.557788 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="840fa4a7-3667-4e39-9d4a-3edb5c41d3fd" path="/var/lib/kubelet/pods/840fa4a7-3667-4e39-9d4a-3edb5c41d3fd/volumes"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.826837 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"8e1f1515-2197-4124-83dc-382a70cd3e91","Type":"ContainerDied","Data":"1e12d04d3f21e5044157d4e03346e3d6239c0f08648a8b6e66f50ea03da433bd"}
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.826913 4868 scope.go:117] "RemoveContainer" containerID="c7c70b0b002d4d2030b948ed263438815e14f295e91b125c61af989ec76d1fbf"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.826865 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
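
The 13:15:02 block above is one complete volume teardown for the old rabbitmq-cell1-server-0 pod (UID 8e1f1515-…): reconciler_common.go:159 starts an unmount per volume, operation_generator.go:803 confirms each TearDown, reconciler_common.go:293 reports the volume detached, and the one attachable local volume additionally passes through UnmountDevice (reconciler_common.go:286, operation_generator.go:917) before its detach. A small sketch that follows each volume through those stages; the regexes, including the literal backslash-escaped quotes, are fitted to this excerpt only:

    import re

    # The \\" sequences match the escaped quotes that appear inside the
    # quoted reconciler messages above.
    STARTED  = re.compile(r'UnmountVolume started for volume \\"([^"\\]+)\\"')
    TORNDOWN = re.compile(r'UnmountVolume\.TearDown succeeded .*\(OuterVolumeSpecName: "([^"]+)"\)')
    DETACHED = re.compile(r'Volume detached for volume \\"([^"\\]+)\\"')

    def teardown_stage(lines):
        stage = {}  # volume name -> furthest stage seen
        for line in lines:
            for pat, label in ((STARTED, "unmount started"),
                               (TORNDOWN, "torn down"),
                               (DETACHED, "detached")):
                if (m := pat.search(line)):
                    stage[m.group(1)] = label
        return stage

    # Run over the block above, every volume of UID 8e1f1515-... ends at
    # "detached", the precondition for the pod's volumes directory being
    # cleaned up afterwards.
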
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.857461 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.864372 4868 scope.go:117] "RemoveContainer" containerID="625f04b3a97858bf53e28d9d473c9007bd6767b393dbbec4bff35073cc8d592f"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.887446 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.897940 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 13:15:02 crc kubenswrapper[4868]: E1003 13:15:02.898458 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="setup-container"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.898479 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="setup-container"
Oct 03 13:15:02 crc kubenswrapper[4868]: E1003 13:15:02.898499 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="rabbitmq"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.898506 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="rabbitmq"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.898716 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" containerName="rabbitmq"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.899812 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
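
Because the StatefulSet recreates rabbitmq-cell1-server-0 under the same name, the SyncLoop REMOVE/ADD pair above swaps pod UIDs, and the cpu_manager / state_mem / memory_manager lines purge per-container state still registered for the old UID (note the wording differs: the CPU manager logs "removing container", the memory manager "removing state"). A sketch collecting those purges under the same line-shape assumptions:

    import re

    STALE = re.compile(r'(cpu_manager|memory_manager)\.go:\d+\] "RemoveStaleState:? removing '
                       r'(?:container|state)" podUID="([^"]+)" containerName="([^"]+)"')

    def stale_purges(lines):
        # -> [(manager file, pod UID, container name), ...]
        return [m.group(1, 2, 3) for line in lines if (m := STALE.search(line))]

    # Here it reports setup-container and rabbitmq for UID 8e1f1515-...,
    # purged just before the replacement pod object is admitted.
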
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.901997 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.902221 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-t6qw8"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.902266 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.902547 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.902710 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.905648 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.905720 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Oct 03 13:15:02 crc kubenswrapper[4868]: I1003 13:15:02.910075 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076260 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076324 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076403 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076431 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mhcrv\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-kube-api-access-mhcrv\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076523 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076547 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076573 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076601 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076663 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076738 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.076773 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.089438 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"]
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.091186 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.101603 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.119285 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"]
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.179724 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181165 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181192 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181216 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mhcrv\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-kube-api-access-mhcrv\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181256 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181284 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181335 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181354 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181372 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181392 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181414 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181453 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181492 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181517 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181546 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181573 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181588 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8h5g\" (UniqueName: \"kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181646 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.181871 4868 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.182682 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.182691 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.184341 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.193152 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.207297 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.265863 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.265890 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.272563 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.275662 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.291505 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mhcrv\" (UniqueName: \"kubernetes.io/projected/0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd-kube-api-access-mhcrv\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.294917 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8h5g\" (UniqueName: \"kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295204 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295333 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295391 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295540 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295683 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.295737 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.297794 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.297981 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.298422 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.298456 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.298563 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.319802 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.323559 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd\") " pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.328334 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8h5g\" (UniqueName: \"kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g\") pod \"dnsmasq-dns-79bd4cc8c9-tv4gr\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.392162 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.456402 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
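
The mount side mirrors the teardown: reconciler_common.go:245 verifies each volume is attached, reconciler_common.go:218 starts the mount, and operation_generator.go:637 confirms SetUp, with the local PV also passing MountVolume.MountDevice (device mount path /mnt/openstack/pv04). Since every klog header carries a microsecond timestamp, the started/succeeded pairs can be timed; a sketch under the same line-shape assumptions as above:

    import re
    from datetime import datetime

    TS    = re.compile(r'[IWE]1003 (\d{2}:\d{2}:\d{2}\.\d{6})')
    START = re.compile(r'MountVolume started for volume \\"([^"\\]+)\\"')
    DONE  = re.compile(r'MountVolume\.SetUp succeeded for volume \\"([^"\\]+)\\"')

    def mount_latencies(lines):
        started, latency = {}, {}
        for line in lines:
            ts = TS.search(line)
            if not ts:
                continue
            t = datetime.strptime(ts.group(1), "%H:%M:%S.%f")
            if (m := START.search(line)):
                started[m.group(1)] = t
            elif (m := DONE.search(line)) and m.group(1) in started:
                latency[m.group(1)] = (t - started[m.group(1)]).total_seconds()
        return latency

    # Against the block above this yields, e.g., rabbitmq-tls taking roughly
    # 0.085 s (13:15:03.181335 -> 13:15:03.265863) to set up.
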
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.498431 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume\") pod \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") "
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.498665 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fzzx2\" (UniqueName: \"kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2\") pod \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") "
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.498886 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume\") pod \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\" (UID: \"767b70bd-7d85-4b49-b429-1fad0a5eac9c\") "
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.499302 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume" (OuterVolumeSpecName: "config-volume") pod "767b70bd-7d85-4b49-b429-1fad0a5eac9c" (UID: "767b70bd-7d85-4b49-b429-1fad0a5eac9c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.499628 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/767b70bd-7d85-4b49-b429-1fad0a5eac9c-config-volume\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.503940 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2" (OuterVolumeSpecName: "kube-api-access-fzzx2") pod "767b70bd-7d85-4b49-b429-1fad0a5eac9c" (UID: "767b70bd-7d85-4b49-b429-1fad0a5eac9c"). InnerVolumeSpecName "kube-api-access-fzzx2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.505215 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "767b70bd-7d85-4b49-b429-1fad0a5eac9c" (UID: "767b70bd-7d85-4b49-b429-1fad0a5eac9c"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.573968 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.601484 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fzzx2\" (UniqueName: \"kubernetes.io/projected/767b70bd-7d85-4b49-b429-1fad0a5eac9c-kube-api-access-fzzx2\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.601509 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/767b70bd-7d85-4b49-b429-1fad0a5eac9c-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.838782 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"115a46e7-8030-4ef7-9567-252f2a2a1467","Type":"ContainerStarted","Data":"5784265986ee6327af90ad8d9d5f439de7b090162c09493e1d2bb71845b91899"}
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.845273 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92" event={"ID":"767b70bd-7d85-4b49-b429-1fad0a5eac9c","Type":"ContainerDied","Data":"c0392f80f94448c56cf60f42136d4e1235f8af9fc923e800ee6de4f1fc25b421"}
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.845323 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c0392f80f94448c56cf60f42136d4e1235f8af9fc923e800ee6de4f1fc25b421"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.845392 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"
Oct 03 13:15:03 crc kubenswrapper[4868]: I1003 13:15:03.908698 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"]
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.043260 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Oct 03 13:15:04 crc kubenswrapper[4868]: W1003 13:15:04.066811 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c74a43f_0f0e_4ec0_bec9_5abedaf5f1cd.slice/crio-80428855d2274a126eabccb0be0fa6e715342031feae68a1fef4baebdd154024 WatchSource:0}: Error finding container 80428855d2274a126eabccb0be0fa6e715342031feae68a1fef4baebdd154024: Status 404 returned error can't find the container with id 80428855d2274a126eabccb0be0fa6e715342031feae68a1fef4baebdd154024
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.560966 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e1f1515-2197-4124-83dc-382a70cd3e91" path="/var/lib/kubelet/pods/8e1f1515-2197-4124-83dc-382a70cd3e91/volumes"
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.857575 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd","Type":"ContainerStarted","Data":"80428855d2274a126eabccb0be0fa6e715342031feae68a1fef4baebdd154024"}
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.859603 4868 generic.go:334] "Generic (PLEG): container finished" podID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerID="0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7" exitCode=0
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.859682 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" event={"ID":"51e669ec-cc7a-484e-8a41-78939a7aebc7","Type":"ContainerDied","Data":"0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7"}
Oct 03 13:15:04 crc kubenswrapper[4868]: I1003 13:15:04.859754 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" event={"ID":"51e669ec-cc7a-484e-8a41-78939a7aebc7","Type":"ContainerStarted","Data":"6dfbee63ca04f117bc9e9296f0078cc964c9bd433ddecb722a311099b00e3d51"}
Oct 03 13:15:05 crc kubenswrapper[4868]: I1003 13:15:05.872882 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd","Type":"ContainerStarted","Data":"c7a5c737b9f5ee766e47c77f7d1eb44b979e3f6b3480e2ce31750fea4cc85e7d"}
Oct 03 13:15:05 crc kubenswrapper[4868]: I1003 13:15:05.876600 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" event={"ID":"51e669ec-cc7a-484e-8a41-78939a7aebc7","Type":"ContainerStarted","Data":"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69"}
Oct 03 13:15:05 crc kubenswrapper[4868]: I1003 13:15:05.876897 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:05 crc kubenswrapper[4868]: I1003 13:15:05.925479 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" podStartSLOduration=2.925436 podStartE2EDuration="2.925436s" podCreationTimestamp="2025-10-03 13:15:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:15:05.920276661 +0000 UTC m=+1502.130125727" watchObservedRunningTime="2025-10-03 13:15:05.925436 +0000 UTC m=+1502.135285066"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.149012 4868 scope.go:117] "RemoveContainer" containerID="fe69a2935c5d64392a9ab31b1c0d0cc2fa16ff4c993299e61227ddf60b7d3eef"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.458267 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.521735 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"]
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.522027 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="dnsmasq-dns" containerID="cri-o://d5e1fc736af22d706c7a227c1b7918b0a778e908fd408ef6eba93f64dfcfcdbf" gracePeriod=10
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.691743 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55478c4467-8hpcf"]
Oct 03 13:15:13 crc kubenswrapper[4868]: E1003 13:15:13.694086 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="767b70bd-7d85-4b49-b429-1fad0a5eac9c" containerName="collect-profiles"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.694162 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="767b70bd-7d85-4b49-b429-1fad0a5eac9c" containerName="collect-profiles"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.695244 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="767b70bd-7d85-4b49-b429-1fad0a5eac9c" containerName="collect-profiles"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.702045 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.739553 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-8hpcf"]
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800604 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-svc\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800697 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800787 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-config\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800815 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800924 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.800993 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpvdj\" (UniqueName: \"kubernetes.io/projected/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-kube-api-access-vpvdj\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.801022 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903325 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903429 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpvdj\" (UniqueName: \"kubernetes.io/projected/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-kube-api-access-vpvdj\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903453 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903484 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-svc\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903529 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903589 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-config\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.903614 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.904226 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-openstack-edpm-ipam\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.904571 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-sb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.904881 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-svc\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.904860 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-dns-swift-storage-0\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.905184 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-ovsdbserver-nb\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.905247 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-config\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.927720 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpvdj\" (UniqueName: \"kubernetes.io/projected/cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48-kube-api-access-vpvdj\") pod \"dnsmasq-dns-55478c4467-8hpcf\" (UID: \"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48\") " pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.956039 4868 generic.go:334] "Generic (PLEG): container finished" podID="76f656ae-c9ea-41e9-8b01-15485b063729" containerID="d5e1fc736af22d706c7a227c1b7918b0a778e908fd408ef6eba93f64dfcfcdbf" exitCode=0
Oct 03 13:15:13 crc kubenswrapper[4868]: I1003 13:15:13.956112 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" event={"ID":"76f656ae-c9ea-41e9-8b01-15485b063729","Type":"ContainerDied","Data":"d5e1fc736af22d706c7a227c1b7918b0a778e908fd408ef6eba93f64dfcfcdbf"}
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.086073 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55478c4467-8hpcf"
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.565784 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55478c4467-8hpcf"]
Oct 03 13:15:14 crc kubenswrapper[4868]: W1003 13:15:14.575573 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcdff8fa4_8a6c_4a18_bdee_6ec43e6d3d48.slice/crio-10f7fb067c63438ca59856e2511601b18da4ccfd319aa435b6c655bb9173373b WatchSource:0}: Error finding container 10f7fb067c63438ca59856e2511601b18da4ccfd319aa435b6c655bb9173373b: Status 404 returned error can't find the container with id 10f7fb067c63438ca59856e2511601b18da4ccfd319aa435b6c655bb9173373b
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.589984 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq"
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.718301 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.718731 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.718760 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.718826 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.718987 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-flc7n\" (UniqueName: \"kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.719030 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb\") pod \"76f656ae-c9ea-41e9-8b01-15485b063729\" (UID: \"76f656ae-c9ea-41e9-8b01-15485b063729\") "
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.728417 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n" (OuterVolumeSpecName: "kube-api-access-flc7n") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "kube-api-access-flc7n". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.790870 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config" (OuterVolumeSpecName: "config") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.804879 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.812009 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.817798 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.822110 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-flc7n\" (UniqueName: \"kubernetes.io/projected/76f656ae-c9ea-41e9-8b01-15485b063729-kube-api-access-flc7n\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.823459 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.823482 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-config\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.823494 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.823506 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.834378 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76f656ae-c9ea-41e9-8b01-15485b063729" (UID: "76f656ae-c9ea-41e9-8b01-15485b063729"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.926765 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76f656ae-c9ea-41e9-8b01-15485b063729-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.982207 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" event={"ID":"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48","Type":"ContainerStarted","Data":"10f7fb067c63438ca59856e2511601b18da4ccfd319aa435b6c655bb9173373b"} Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.986430 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" event={"ID":"76f656ae-c9ea-41e9-8b01-15485b063729","Type":"ContainerDied","Data":"4d3cb025c33d82c8a3e192faba7da76af70caf451bfaaf3562bb20b0f4192ba2"} Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.986465 4868 scope.go:117] "RemoveContainer" containerID="d5e1fc736af22d706c7a227c1b7918b0a778e908fd408ef6eba93f64dfcfcdbf" Oct 03 13:15:14 crc kubenswrapper[4868]: I1003 13:15:14.986505 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" Oct 03 13:15:15 crc kubenswrapper[4868]: I1003 13:15:15.013998 4868 scope.go:117] "RemoveContainer" containerID="17421209ee7d31e74be9f41663a25fc26200a742fc6cc15d469d77ba8ecb101b" Oct 03 13:15:15 crc kubenswrapper[4868]: I1003 13:15:15.029629 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"] Oct 03 13:15:15 crc kubenswrapper[4868]: I1003 13:15:15.039818 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-6gqrq"] Oct 03 13:15:16 crc kubenswrapper[4868]: I1003 13:15:16.002246 4868 generic.go:334] "Generic (PLEG): container finished" podID="cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48" containerID="a0748da665a146fa5efa5e6bbc3accf1ffe464d45d4529b8e9aaa9d7c8332b0b" exitCode=0 Oct 03 13:15:16 crc kubenswrapper[4868]: I1003 13:15:16.002376 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" event={"ID":"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48","Type":"ContainerDied","Data":"a0748da665a146fa5efa5e6bbc3accf1ffe464d45d4529b8e9aaa9d7c8332b0b"} Oct 03 13:15:16 crc kubenswrapper[4868]: I1003 13:15:16.554157 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" path="/var/lib/kubelet/pods/76f656ae-c9ea-41e9-8b01-15485b063729/volumes" Oct 03 13:15:17 crc kubenswrapper[4868]: I1003 13:15:17.014496 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" event={"ID":"cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48","Type":"ContainerStarted","Data":"ce6182d91dc5679b3cd0b467fafc14582d561600056569c249ccf5e13137c79b"} Oct 03 13:15:17 crc kubenswrapper[4868]: I1003 13:15:17.014810 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" Oct 03 13:15:17 crc kubenswrapper[4868]: I1003 13:15:17.038103 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" podStartSLOduration=4.038083952 podStartE2EDuration="4.038083952s" podCreationTimestamp="2025-10-03 13:15:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-10-03 13:15:17.037449094 +0000 UTC m=+1513.247298180" watchObservedRunningTime="2025-10-03 13:15:17.038083952 +0000 UTC m=+1513.247933028" Oct 03 13:15:19 crc kubenswrapper[4868]: I1003 13:15:19.510333 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-89c5cd4d5-6gqrq" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.196:5353: i/o timeout" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.088322 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55478c4467-8hpcf" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.169684 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"] Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.170365 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="dnsmasq-dns" containerID="cri-o://5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69" gracePeriod=10 Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.690573 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.749328 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.749805 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.749899 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.749972 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s8h5g\" (UniqueName: \"kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.749996 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.750014 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 
13:15:24.750069 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb\") pod \"51e669ec-cc7a-484e-8a41-78939a7aebc7\" (UID: \"51e669ec-cc7a-484e-8a41-78939a7aebc7\") " Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.775463 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g" (OuterVolumeSpecName: "kube-api-access-s8h5g") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "kube-api-access-s8h5g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.823857 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.831593 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.832516 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.835240 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.850177 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config" (OuterVolumeSpecName: "config") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.851991 4868 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-config\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.852017 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s8h5g\" (UniqueName: \"kubernetes.io/projected/51e669ec-cc7a-484e-8a41-78939a7aebc7-kube-api-access-s8h5g\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.852032 4868 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-svc\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.852042 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.852062 4868 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.852071 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.856272 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "51e669ec-cc7a-484e-8a41-78939a7aebc7" (UID: "51e669ec-cc7a-484e-8a41-78939a7aebc7"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:15:24 crc kubenswrapper[4868]: I1003 13:15:24.953568 4868 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/51e669ec-cc7a-484e-8a41-78939a7aebc7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.098786 4868 generic.go:334] "Generic (PLEG): container finished" podID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerID="5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69" exitCode=0 Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.098844 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" event={"ID":"51e669ec-cc7a-484e-8a41-78939a7aebc7","Type":"ContainerDied","Data":"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69"} Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.098898 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" event={"ID":"51e669ec-cc7a-484e-8a41-78939a7aebc7","Type":"ContainerDied","Data":"6dfbee63ca04f117bc9e9296f0078cc964c9bd433ddecb722a311099b00e3d51"} Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.098918 4868 scope.go:117] "RemoveContainer" containerID="5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.098930 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-tv4gr" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.122523 4868 scope.go:117] "RemoveContainer" containerID="0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.147599 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"] Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.163487 4868 scope.go:117] "RemoveContainer" containerID="5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.163994 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-tv4gr"] Oct 03 13:15:25 crc kubenswrapper[4868]: E1003 13:15:25.165676 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69\": container with ID starting with 5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69 not found: ID does not exist" containerID="5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.165765 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69"} err="failed to get container status \"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69\": rpc error: code = NotFound desc = could not find container \"5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69\": container with ID starting with 5344daf16cd42164179dd129a12a0f110669a9ab8511411b79578a9346260c69 not found: ID does not exist" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.165807 4868 scope.go:117] "RemoveContainer" containerID="0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7" Oct 03 13:15:25 crc kubenswrapper[4868]: E1003 
13:15:25.166409 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7\": container with ID starting with 0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7 not found: ID does not exist" containerID="0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7" Oct 03 13:15:25 crc kubenswrapper[4868]: I1003 13:15:25.166473 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7"} err="failed to get container status \"0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7\": rpc error: code = NotFound desc = could not find container \"0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7\": container with ID starting with 0e4e44fc8105fa6b98fe4755eae87e51752b34c1748d453e4977ef6fd4e14ea7 not found: ID does not exist" Oct 03 13:15:26 crc kubenswrapper[4868]: I1003 13:15:26.555455 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" path="/var/lib/kubelet/pods/51e669ec-cc7a-484e-8a41-78939a7aebc7/volumes" Oct 03 13:15:32 crc kubenswrapper[4868]: I1003 13:15:32.145530 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:15:32 crc kubenswrapper[4868]: I1003 13:15:32.146118 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:15:36 crc kubenswrapper[4868]: I1003 13:15:36.217641 4868 generic.go:334] "Generic (PLEG): container finished" podID="115a46e7-8030-4ef7-9567-252f2a2a1467" containerID="5784265986ee6327af90ad8d9d5f439de7b090162c09493e1d2bb71845b91899" exitCode=0 Oct 03 13:15:36 crc kubenswrapper[4868]: I1003 13:15:36.217727 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"115a46e7-8030-4ef7-9567-252f2a2a1467","Type":"ContainerDied","Data":"5784265986ee6327af90ad8d9d5f439de7b090162c09493e1d2bb71845b91899"} Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.231336 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"115a46e7-8030-4ef7-9567-252f2a2a1467","Type":"ContainerStarted","Data":"fb42e0944f388ebf43f2755f56498f7dc4304dd8a3146c834253c867d48caa7d"} Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.232310 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.268465 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.268445341 podStartE2EDuration="37.268445341s" podCreationTimestamp="2025-10-03 13:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:15:37.255859403 +0000 UTC m=+1533.465708469" watchObservedRunningTime="2025-10-03 
13:15:37.268445341 +0000 UTC m=+1533.478294397" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309081 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff"] Oct 03 13:15:37 crc kubenswrapper[4868]: E1003 13:15:37.309624 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="init" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309649 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="init" Oct 03 13:15:37 crc kubenswrapper[4868]: E1003 13:15:37.309695 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309704 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: E1003 13:15:37.309720 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="init" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309729 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="init" Oct 03 13:15:37 crc kubenswrapper[4868]: E1003 13:15:37.309751 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309758 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.309997 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="76f656ae-c9ea-41e9-8b01-15485b063729" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.310020 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="51e669ec-cc7a-484e-8a41-78939a7aebc7" containerName="dnsmasq-dns" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.310860 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.314116 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.314217 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.315748 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.316612 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.322285 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff"] Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.430230 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4slx\" (UniqueName: \"kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.431003 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.431226 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.431274 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.533019 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.533114 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.533137 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.533167 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4slx\" (UniqueName: \"kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.541808 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.545876 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.551720 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.557074 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4slx\" (UniqueName: \"kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-mctff\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:37 crc kubenswrapper[4868]: I1003 13:15:37.638234 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.021872 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff"] Oct 03 13:15:38 crc kubenswrapper[4868]: W1003 13:15:38.033857 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode8ebaf9b_70a4_44de_8873_d9ff816819a1.slice/crio-3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338 WatchSource:0}: Error finding container 3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338: Status 404 returned error can't find the container with id 3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338 Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.223586 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.261272 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.272433 4868 generic.go:334] "Generic (PLEG): container finished" podID="0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd" containerID="c7a5c737b9f5ee766e47c77f7d1eb44b979e3f6b3480e2ce31750fea4cc85e7d" exitCode=0 Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.272525 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd","Type":"ContainerDied","Data":"c7a5c737b9f5ee766e47c77f7d1eb44b979e3f6b3480e2ce31750fea4cc85e7d"} Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.276386 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.279635 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" event={"ID":"e8ebaf9b-70a4-44de-8873-d9ff816819a1","Type":"ContainerStarted","Data":"3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338"} Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.362684 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mw2sj\" (UniqueName: \"kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.363708 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.363824 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: 
I1003 13:15:38.466249 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.466336 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.466458 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mw2sj\" (UniqueName: \"kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.467201 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.467395 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.486206 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mw2sj\" (UniqueName: \"kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj\") pod \"certified-operators-75dl4\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:38 crc kubenswrapper[4868]: I1003 13:15:38.588456 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:39 crc kubenswrapper[4868]: I1003 13:15:39.182659 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:15:39 crc kubenswrapper[4868]: W1003 13:15:39.199644 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25199e98_91f1_4387_9c22_45a6d83a90c9.slice/crio-019dc2bd05e867e2eb39017cca6e23427d8c3ff8de65e5e0869e02881daee134 WatchSource:0}: Error finding container 019dc2bd05e867e2eb39017cca6e23427d8c3ff8de65e5e0869e02881daee134: Status 404 returned error can't find the container with id 019dc2bd05e867e2eb39017cca6e23427d8c3ff8de65e5e0869e02881daee134 Oct 03 13:15:39 crc kubenswrapper[4868]: I1003 13:15:39.293405 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerStarted","Data":"019dc2bd05e867e2eb39017cca6e23427d8c3ff8de65e5e0869e02881daee134"} Oct 03 13:15:39 crc kubenswrapper[4868]: I1003 13:15:39.295825 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd","Type":"ContainerStarted","Data":"0b2f592bfaeb775fb194efc7b03a58af958ab127ebb46971f869fc6f3b558ae1"} Oct 03 13:15:39 crc kubenswrapper[4868]: I1003 13:15:39.296130 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:15:39 crc kubenswrapper[4868]: I1003 13:15:39.342772 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=37.342751765 podStartE2EDuration="37.342751765s" podCreationTimestamp="2025-10-03 13:15:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:15:39.336416155 +0000 UTC m=+1535.546265221" watchObservedRunningTime="2025-10-03 13:15:39.342751765 +0000 UTC m=+1535.552600831" Oct 03 13:15:40 crc kubenswrapper[4868]: I1003 13:15:40.316041 4868 generic.go:334] "Generic (PLEG): container finished" podID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerID="bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797" exitCode=0 Oct 03 13:15:40 crc kubenswrapper[4868]: I1003 13:15:40.316165 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerDied","Data":"bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797"} Oct 03 13:15:41 crc kubenswrapper[4868]: I1003 13:15:41.340206 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerStarted","Data":"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d"} Oct 03 13:15:44 crc kubenswrapper[4868]: I1003 13:15:44.374582 4868 generic.go:334] "Generic (PLEG): container finished" podID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerID="c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d" exitCode=0 Oct 03 13:15:44 crc kubenswrapper[4868]: I1003 13:15:44.374727 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" 
event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerDied","Data":"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d"} Oct 03 13:15:51 crc kubenswrapper[4868]: I1003 13:15:51.233285 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="115a46e7-8030-4ef7-9567-252f2a2a1467" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.207:5671: connect: connection refused" Oct 03 13:15:53 crc kubenswrapper[4868]: I1003 13:15:53.580369 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Oct 03 13:15:54 crc kubenswrapper[4868]: I1003 13:15:54.468666 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" event={"ID":"e8ebaf9b-70a4-44de-8873-d9ff816819a1","Type":"ContainerStarted","Data":"3ab5be04c99b392152013c0c6bfaf4c87d8cfd95a6aa09f5d6e45569e325b196"} Oct 03 13:15:54 crc kubenswrapper[4868]: I1003 13:15:54.472762 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerStarted","Data":"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817"} Oct 03 13:15:54 crc kubenswrapper[4868]: I1003 13:15:54.516736 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" podStartSLOduration=1.90243531 podStartE2EDuration="17.516714716s" podCreationTimestamp="2025-10-03 13:15:37 +0000 UTC" firstStartedPulling="2025-10-03 13:15:38.037047698 +0000 UTC m=+1534.246896764" lastFinishedPulling="2025-10-03 13:15:53.651327104 +0000 UTC m=+1549.861176170" observedRunningTime="2025-10-03 13:15:54.492689991 +0000 UTC m=+1550.702539067" watchObservedRunningTime="2025-10-03 13:15:54.516714716 +0000 UTC m=+1550.726563782" Oct 03 13:15:54 crc kubenswrapper[4868]: I1003 13:15:54.519473 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-75dl4" podStartSLOduration=3.204734146 podStartE2EDuration="16.519461649s" podCreationTimestamp="2025-10-03 13:15:38 +0000 UTC" firstStartedPulling="2025-10-03 13:15:40.320301914 +0000 UTC m=+1536.530150980" lastFinishedPulling="2025-10-03 13:15:53.635029417 +0000 UTC m=+1549.844878483" observedRunningTime="2025-10-03 13:15:54.510892089 +0000 UTC m=+1550.720741155" watchObservedRunningTime="2025-10-03 13:15:54.519461649 +0000 UTC m=+1550.729310725" Oct 03 13:15:58 crc kubenswrapper[4868]: I1003 13:15:58.590048 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:58 crc kubenswrapper[4868]: I1003 13:15:58.591191 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:58 crc kubenswrapper[4868]: I1003 13:15:58.636014 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:59 crc kubenswrapper[4868]: I1003 13:15:59.568073 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:15:59 crc kubenswrapper[4868]: I1003 13:15:59.614776 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:16:01 crc 
kubenswrapper[4868]: I1003 13:16:01.233314 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Oct 03 13:16:01 crc kubenswrapper[4868]: I1003 13:16:01.544834 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-75dl4" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="registry-server" containerID="cri-o://afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817" gracePeriod=2 Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.145599 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.146036 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.548976 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.558755 4868 generic.go:334] "Generic (PLEG): container finished" podID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerID="afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817" exitCode=0 Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.558804 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerDied","Data":"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817"} Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.558808 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-75dl4" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.558845 4868 scope.go:117] "RemoveContainer" containerID="afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.558831 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-75dl4" event={"ID":"25199e98-91f1-4387-9c22-45a6d83a90c9","Type":"ContainerDied","Data":"019dc2bd05e867e2eb39017cca6e23427d8c3ff8de65e5e0869e02881daee134"} Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.585891 4868 scope.go:117] "RemoveContainer" containerID="c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.602742 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content\") pod \"25199e98-91f1-4387-9c22-45a6d83a90c9\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.603487 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mw2sj\" (UniqueName: \"kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj\") pod \"25199e98-91f1-4387-9c22-45a6d83a90c9\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.603633 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities\") pod \"25199e98-91f1-4387-9c22-45a6d83a90c9\" (UID: \"25199e98-91f1-4387-9c22-45a6d83a90c9\") " Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.604402 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities" (OuterVolumeSpecName: "utilities") pod "25199e98-91f1-4387-9c22-45a6d83a90c9" (UID: "25199e98-91f1-4387-9c22-45a6d83a90c9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.609630 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj" (OuterVolumeSpecName: "kube-api-access-mw2sj") pod "25199e98-91f1-4387-9c22-45a6d83a90c9" (UID: "25199e98-91f1-4387-9c22-45a6d83a90c9"). InnerVolumeSpecName "kube-api-access-mw2sj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.628333 4868 scope.go:117] "RemoveContainer" containerID="bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.660480 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25199e98-91f1-4387-9c22-45a6d83a90c9" (UID: "25199e98-91f1-4387-9c22-45a6d83a90c9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.699667 4868 scope.go:117] "RemoveContainer" containerID="afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817" Oct 03 13:16:02 crc kubenswrapper[4868]: E1003 13:16:02.701140 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817\": container with ID starting with afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817 not found: ID does not exist" containerID="afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.701179 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817"} err="failed to get container status \"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817\": rpc error: code = NotFound desc = could not find container \"afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817\": container with ID starting with afc48b5033212123ddc8facf958510892970e350f44c8e64a87a376c9b6ba817 not found: ID does not exist" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.701210 4868 scope.go:117] "RemoveContainer" containerID="c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d" Oct 03 13:16:02 crc kubenswrapper[4868]: E1003 13:16:02.705144 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d\": container with ID starting with c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d not found: ID does not exist" containerID="c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.705193 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d"} err="failed to get container status \"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d\": rpc error: code = NotFound desc = could not find container \"c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d\": container with ID starting with c778fe295a1504433f31cb6dbdb98d4dcd07d0bbe7be3e5862af655e4032930d not found: ID does not exist" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.705218 4868 scope.go:117] "RemoveContainer" containerID="bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797" Oct 03 13:16:02 crc kubenswrapper[4868]: E1003 13:16:02.705572 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797\": container with ID starting with bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797 not found: ID does not exist" containerID="bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.705617 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797"} err="failed to get container status \"bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797\": rpc error: code = NotFound desc = could not 
find container \"bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797\": container with ID starting with bf3e406c1476376b1386712162c40b6f5247e7bddced4e3dfff995e149990797 not found: ID does not exist" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.707209 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mw2sj\" (UniqueName: \"kubernetes.io/projected/25199e98-91f1-4387-9c22-45a6d83a90c9-kube-api-access-mw2sj\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.707246 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.707258 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25199e98-91f1-4387-9c22-45a6d83a90c9-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.894242 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:16:02 crc kubenswrapper[4868]: I1003 13:16:02.904038 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-75dl4"] Oct 03 13:16:04 crc kubenswrapper[4868]: I1003 13:16:04.559935 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" path="/var/lib/kubelet/pods/25199e98-91f1-4387-9c22-45a6d83a90c9/volumes" Oct 03 13:16:06 crc kubenswrapper[4868]: I1003 13:16:06.601025 4868 generic.go:334] "Generic (PLEG): container finished" podID="e8ebaf9b-70a4-44de-8873-d9ff816819a1" containerID="3ab5be04c99b392152013c0c6bfaf4c87d8cfd95a6aa09f5d6e45569e325b196" exitCode=0 Oct 03 13:16:06 crc kubenswrapper[4868]: I1003 13:16:06.601123 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" event={"ID":"e8ebaf9b-70a4-44de-8873-d9ff816819a1","Type":"ContainerDied","Data":"3ab5be04c99b392152013c0c6bfaf4c87d8cfd95a6aa09f5d6e45569e325b196"} Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.048664 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.123840 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4slx\" (UniqueName: \"kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx\") pod \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.123996 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle\") pod \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.124048 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key\") pod \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.124144 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory\") pod \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\" (UID: \"e8ebaf9b-70a4-44de-8873-d9ff816819a1\") " Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.130917 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "e8ebaf9b-70a4-44de-8873-d9ff816819a1" (UID: "e8ebaf9b-70a4-44de-8873-d9ff816819a1"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.131114 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx" (OuterVolumeSpecName: "kube-api-access-f4slx") pod "e8ebaf9b-70a4-44de-8873-d9ff816819a1" (UID: "e8ebaf9b-70a4-44de-8873-d9ff816819a1"). InnerVolumeSpecName "kube-api-access-f4slx". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.156295 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory" (OuterVolumeSpecName: "inventory") pod "e8ebaf9b-70a4-44de-8873-d9ff816819a1" (UID: "e8ebaf9b-70a4-44de-8873-d9ff816819a1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.161613 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e8ebaf9b-70a4-44de-8873-d9ff816819a1" (UID: "e8ebaf9b-70a4-44de-8873-d9ff816819a1"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.226802 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.226852 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4slx\" (UniqueName: \"kubernetes.io/projected/e8ebaf9b-70a4-44de-8873-d9ff816819a1-kube-api-access-f4slx\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.226866 4868 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.226878 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e8ebaf9b-70a4-44de-8873-d9ff816819a1-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.643556 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" event={"ID":"e8ebaf9b-70a4-44de-8873-d9ff816819a1","Type":"ContainerDied","Data":"3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338"} Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.643598 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b1e30b7f3c0acec27e68c3fa0fa4d28c537ba11c95482639ead57c94a6fa338" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.643658 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-mctff" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702169 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls"] Oct 03 13:16:08 crc kubenswrapper[4868]: E1003 13:16:08.702580 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="registry-server" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702594 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="registry-server" Oct 03 13:16:08 crc kubenswrapper[4868]: E1003 13:16:08.702606 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8ebaf9b-70a4-44de-8873-d9ff816819a1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702613 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8ebaf9b-70a4-44de-8873-d9ff816819a1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:08 crc kubenswrapper[4868]: E1003 13:16:08.702653 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="extract-utilities" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702661 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="extract-utilities" Oct 03 13:16:08 crc kubenswrapper[4868]: E1003 13:16:08.702671 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="extract-content" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702677 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="extract-content" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702841 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8ebaf9b-70a4-44de-8873-d9ff816819a1" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.702865 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="25199e98-91f1-4387-9c22-45a6d83a90c9" containerName="registry-server" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.703531 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.705169 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.705620 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.706075 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.714065 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls"] Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.714092 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.839216 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.839269 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blnvg\" (UniqueName: \"kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.839355 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.941308 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.941375 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blnvg\" (UniqueName: \"kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.941422 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.945800 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.949427 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:08 crc kubenswrapper[4868]: I1003 13:16:08.963191 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blnvg\" (UniqueName: \"kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-scfls\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:09 crc kubenswrapper[4868]: I1003 13:16:09.029665 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:09 crc kubenswrapper[4868]: I1003 13:16:09.532785 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls"] Oct 03 13:16:09 crc kubenswrapper[4868]: W1003 13:16:09.534774 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9bb16b8f_fb6f_475e_b4cd_fdea4804d5e0.slice/crio-e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe WatchSource:0}: Error finding container e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe: Status 404 returned error can't find the container with id e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe Oct 03 13:16:09 crc kubenswrapper[4868]: I1003 13:16:09.655614 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" event={"ID":"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0","Type":"ContainerStarted","Data":"e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe"} Oct 03 13:16:10 crc kubenswrapper[4868]: I1003 13:16:10.667408 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" event={"ID":"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0","Type":"ContainerStarted","Data":"27eb8fb1e90b21506aed08eebaf19525e9562b020d10dc4d47b258772c30e3af"} Oct 03 13:16:10 crc kubenswrapper[4868]: I1003 13:16:10.691032 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" podStartSLOduration=2.508144893 podStartE2EDuration="2.691013407s" podCreationTimestamp="2025-10-03 13:16:08 +0000 UTC" firstStartedPulling="2025-10-03 13:16:09.539635326 +0000 UTC m=+1565.749484402" lastFinishedPulling="2025-10-03 13:16:09.72250385 +0000 UTC m=+1565.932352916" observedRunningTime="2025-10-03 13:16:10.681256155 +0000 UTC m=+1566.891105231" watchObservedRunningTime="2025-10-03 13:16:10.691013407 +0000 UTC 
m=+1566.900862473" Oct 03 13:16:12 crc kubenswrapper[4868]: I1003 13:16:12.687569 4868 generic.go:334] "Generic (PLEG): container finished" podID="9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" containerID="27eb8fb1e90b21506aed08eebaf19525e9562b020d10dc4d47b258772c30e3af" exitCode=0 Oct 03 13:16:12 crc kubenswrapper[4868]: I1003 13:16:12.687665 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" event={"ID":"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0","Type":"ContainerDied","Data":"27eb8fb1e90b21506aed08eebaf19525e9562b020d10dc4d47b258772c30e3af"} Oct 03 13:16:13 crc kubenswrapper[4868]: I1003 13:16:13.336293 4868 scope.go:117] "RemoveContainer" containerID="2dc4f072b7830febd41ea7cbe7eca7d27c1121467e46a41fbf3027d374619a3d" Oct 03 13:16:13 crc kubenswrapper[4868]: I1003 13:16:13.372496 4868 scope.go:117] "RemoveContainer" containerID="b0f2a21f37f5b3b3bdbb8507a708023799e11718cac95370703e46492653ed9a" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.158664 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.261570 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key\") pod \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.261620 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blnvg\" (UniqueName: \"kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg\") pod \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.261708 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory\") pod \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\" (UID: \"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0\") " Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.274332 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg" (OuterVolumeSpecName: "kube-api-access-blnvg") pod "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" (UID: "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0"). InnerVolumeSpecName "kube-api-access-blnvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.291741 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory" (OuterVolumeSpecName: "inventory") pod "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" (UID: "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.293165 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" (UID: "9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.365023 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.365081 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.365092 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blnvg\" (UniqueName: \"kubernetes.io/projected/9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0-kube-api-access-blnvg\") on node \"crc\" DevicePath \"\"" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.711522 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" event={"ID":"9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0","Type":"ContainerDied","Data":"e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe"} Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.711909 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e5fa88cc9c2bd428859c3ae2f2992b4353c742e49ba332d1ca3296f8d5c926fe" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.711626 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-scfls" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.780701 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc"] Oct 03 13:16:14 crc kubenswrapper[4868]: E1003 13:16:14.781512 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.781566 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.781790 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.782667 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.786223 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.786427 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.786542 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.786692 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.795175 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc"] Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.876174 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.876222 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q2xw\" (UniqueName: \"kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.876312 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.876360 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.979468 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.980127 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory\") pod 
\"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.980340 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.980369 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q2xw\" (UniqueName: \"kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.984419 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.984840 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:14 crc kubenswrapper[4868]: I1003 13:16:14.985981 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:15 crc kubenswrapper[4868]: I1003 13:16:15.004855 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q2xw\" (UniqueName: \"kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:15 crc kubenswrapper[4868]: I1003 13:16:15.152199 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:16:15 crc kubenswrapper[4868]: I1003 13:16:15.631019 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc"] Oct 03 13:16:15 crc kubenswrapper[4868]: I1003 13:16:15.727023 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" event={"ID":"788a1c65-a9bf-419d-aca4-464a22ece644","Type":"ContainerStarted","Data":"a7b772dbd17cd4f33b8a4ab71f8fd3a0ac5f32186e6507d0397ed16c56014e11"} Oct 03 13:16:17 crc kubenswrapper[4868]: I1003 13:16:17.746725 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" event={"ID":"788a1c65-a9bf-419d-aca4-464a22ece644","Type":"ContainerStarted","Data":"7b7e9aa577bd2515ba6e71f76c74293eac93ea8fef413e7918fc076404733e78"} Oct 03 13:16:17 crc kubenswrapper[4868]: I1003 13:16:17.776199 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" podStartSLOduration=2.329345967 podStartE2EDuration="3.776179108s" podCreationTimestamp="2025-10-03 13:16:14 +0000 UTC" firstStartedPulling="2025-10-03 13:16:15.634368783 +0000 UTC m=+1571.844217839" lastFinishedPulling="2025-10-03 13:16:17.081201914 +0000 UTC m=+1573.291050980" observedRunningTime="2025-10-03 13:16:17.77180257 +0000 UTC m=+1573.981651666" watchObservedRunningTime="2025-10-03 13:16:17.776179108 +0000 UTC m=+1573.986028184" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.145154 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.146084 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.146173 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.147433 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.147514 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" gracePeriod=600 Oct 03 13:16:32 crc kubenswrapper[4868]: E1003 13:16:32.297354 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for 
\"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.909332 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" exitCode=0 Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.909427 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31"} Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.909819 4868 scope.go:117] "RemoveContainer" containerID="8b77235572aa8a2f22498a0d10a4db0ccbdcf4a0e02d3864bbf22793349616b6" Oct 03 13:16:32 crc kubenswrapper[4868]: I1003 13:16:32.910671 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:16:32 crc kubenswrapper[4868]: E1003 13:16:32.911025 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:16:46 crc kubenswrapper[4868]: I1003 13:16:46.544100 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:16:46 crc kubenswrapper[4868]: E1003 13:16:46.545031 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.613586 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.616815 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.629009 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.747271 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.747433 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.747593 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhq6s\" (UniqueName: \"kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.849926 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.850008 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.850090 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhq6s\" (UniqueName: \"kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.850655 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.850739 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.875076 4868 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-rhq6s\" (UniqueName: \"kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s\") pod \"redhat-marketplace-xh4z4\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:51 crc kubenswrapper[4868]: I1003 13:16:51.936877 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:16:52 crc kubenswrapper[4868]: I1003 13:16:52.397414 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:16:53 crc kubenswrapper[4868]: I1003 13:16:53.109602 4868 generic.go:334] "Generic (PLEG): container finished" podID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerID="a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc" exitCode=0 Oct 03 13:16:53 crc kubenswrapper[4868]: I1003 13:16:53.109654 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerDied","Data":"a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc"} Oct 03 13:16:53 crc kubenswrapper[4868]: I1003 13:16:53.109682 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerStarted","Data":"3eb228d782dd847dcafe64dd25f9ba17a544c5c52e38cd6995250e97a2facd0d"} Oct 03 13:16:55 crc kubenswrapper[4868]: I1003 13:16:55.128191 4868 generic.go:334] "Generic (PLEG): container finished" podID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerID="0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf" exitCode=0 Oct 03 13:16:55 crc kubenswrapper[4868]: I1003 13:16:55.128280 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerDied","Data":"0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf"} Oct 03 13:16:58 crc kubenswrapper[4868]: I1003 13:16:58.162390 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerStarted","Data":"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d"} Oct 03 13:16:58 crc kubenswrapper[4868]: I1003 13:16:58.184182 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-xh4z4" podStartSLOduration=3.1962314960000002 podStartE2EDuration="7.184161358s" podCreationTimestamp="2025-10-03 13:16:51 +0000 UTC" firstStartedPulling="2025-10-03 13:16:53.113823936 +0000 UTC m=+1609.323673002" lastFinishedPulling="2025-10-03 13:16:57.101753798 +0000 UTC m=+1613.311602864" observedRunningTime="2025-10-03 13:16:58.181654071 +0000 UTC m=+1614.391503157" watchObservedRunningTime="2025-10-03 13:16:58.184161358 +0000 UTC m=+1614.394010434" Oct 03 13:17:00 crc kubenswrapper[4868]: I1003 13:17:00.544469 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:17:00 crc kubenswrapper[4868]: E1003 13:17:00.545414 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:17:01 crc kubenswrapper[4868]: I1003 13:17:01.937134 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:01 crc kubenswrapper[4868]: I1003 13:17:01.937579 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:01 crc kubenswrapper[4868]: I1003 13:17:01.987451 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:02 crc kubenswrapper[4868]: I1003 13:17:02.237469 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:02 crc kubenswrapper[4868]: I1003 13:17:02.294469 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.216190 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-xh4z4" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="registry-server" containerID="cri-o://480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d" gracePeriod=2 Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.690118 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.727726 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities\") pod \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.727924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content\") pod \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.728148 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhq6s\" (UniqueName: \"kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s\") pod \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\" (UID: \"5c556a94-06c8-4b28-b6bc-cf495d3291dc\") " Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.729914 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities" (OuterVolumeSpecName: "utilities") pod "5c556a94-06c8-4b28-b6bc-cf495d3291dc" (UID: "5c556a94-06c8-4b28-b6bc-cf495d3291dc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.735994 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s" (OuterVolumeSpecName: "kube-api-access-rhq6s") pod "5c556a94-06c8-4b28-b6bc-cf495d3291dc" (UID: "5c556a94-06c8-4b28-b6bc-cf495d3291dc"). InnerVolumeSpecName "kube-api-access-rhq6s". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.745703 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5c556a94-06c8-4b28-b6bc-cf495d3291dc" (UID: "5c556a94-06c8-4b28-b6bc-cf495d3291dc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.830408 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.830461 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhq6s\" (UniqueName: \"kubernetes.io/projected/5c556a94-06c8-4b28-b6bc-cf495d3291dc-kube-api-access-rhq6s\") on node \"crc\" DevicePath \"\"" Oct 03 13:17:04 crc kubenswrapper[4868]: I1003 13:17:04.830478 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5c556a94-06c8-4b28-b6bc-cf495d3291dc-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.230162 4868 generic.go:334] "Generic (PLEG): container finished" podID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerID="480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d" exitCode=0 Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.230262 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-xh4z4" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.230240 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerDied","Data":"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d"} Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.231359 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-xh4z4" event={"ID":"5c556a94-06c8-4b28-b6bc-cf495d3291dc","Type":"ContainerDied","Data":"3eb228d782dd847dcafe64dd25f9ba17a544c5c52e38cd6995250e97a2facd0d"} Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.231405 4868 scope.go:117] "RemoveContainer" containerID="480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.257214 4868 scope.go:117] "RemoveContainer" containerID="0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.273490 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.282356 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-xh4z4"] Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.291137 4868 scope.go:117] "RemoveContainer" containerID="a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.333893 4868 scope.go:117] "RemoveContainer" containerID="480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d" Oct 03 13:17:05 crc kubenswrapper[4868]: E1003 13:17:05.334418 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d\": container with ID starting with 480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d not found: ID does not exist" containerID="480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.334484 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d"} err="failed to get container status \"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d\": rpc error: code = NotFound desc = could not find container \"480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d\": container with ID starting with 480d8f8db5637ba55e12c62e08761d8e8f933f0cb50375763e03ad540d0f375d not found: ID does not exist" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.334518 4868 scope.go:117] "RemoveContainer" containerID="0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf" Oct 03 13:17:05 crc kubenswrapper[4868]: E1003 13:17:05.334829 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf\": container with ID starting with 0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf not found: ID does not exist" containerID="0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.334863 4868 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf"} err="failed to get container status \"0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf\": rpc error: code = NotFound desc = could not find container \"0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf\": container with ID starting with 0ced1f40cf612b6960d87343d92fd1f91f3a9dacdb5fc310a5ff619479da5ebf not found: ID does not exist" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.334906 4868 scope.go:117] "RemoveContainer" containerID="a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc" Oct 03 13:17:05 crc kubenswrapper[4868]: E1003 13:17:05.335346 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc\": container with ID starting with a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc not found: ID does not exist" containerID="a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc" Oct 03 13:17:05 crc kubenswrapper[4868]: I1003 13:17:05.335378 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc"} err="failed to get container status \"a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc\": rpc error: code = NotFound desc = could not find container \"a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc\": container with ID starting with a816fac17b1b678cce1dac813d468b93e0372bbba88c389aa1b7deaada83d5fc not found: ID does not exist" Oct 03 13:17:06 crc kubenswrapper[4868]: I1003 13:17:06.554327 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" path="/var/lib/kubelet/pods/5c556a94-06c8-4b28-b6bc-cf495d3291dc/volumes" Oct 03 13:17:11 crc kubenswrapper[4868]: I1003 13:17:11.543990 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:17:11 crc kubenswrapper[4868]: E1003 13:17:11.545169 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:17:13 crc kubenswrapper[4868]: I1003 13:17:13.528220 4868 scope.go:117] "RemoveContainer" containerID="73ee913a098fd1ba42887e1d61de4a2a08c81c5a7dc9f05dd3a4472ec20c4820" Oct 03 13:17:24 crc kubenswrapper[4868]: I1003 13:17:24.555758 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:17:24 crc kubenswrapper[4868]: E1003 13:17:24.557406 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:17:37 crc 
kubenswrapper[4868]: I1003 13:17:37.544451 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:17:37 crc kubenswrapper[4868]: E1003 13:17:37.545277 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:17:49 crc kubenswrapper[4868]: I1003 13:17:49.543951 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:17:49 crc kubenswrapper[4868]: E1003 13:17:49.545339 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:18:04 crc kubenswrapper[4868]: I1003 13:18:04.558034 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:18:04 crc kubenswrapper[4868]: E1003 13:18:04.560693 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:18:13 crc kubenswrapper[4868]: I1003 13:18:13.615408 4868 scope.go:117] "RemoveContainer" containerID="800da94e35e2afd56472e53f231865f9e1fd639c9e44d7a684f5c583d3a882bb" Oct 03 13:18:13 crc kubenswrapper[4868]: I1003 13:18:13.639370 4868 scope.go:117] "RemoveContainer" containerID="3d44132a504a043606ee71b4269ea98fbd7bc828cc9cdcf80cbe91c64540c974" Oct 03 13:18:13 crc kubenswrapper[4868]: I1003 13:18:13.656727 4868 scope.go:117] "RemoveContainer" containerID="d8d95b978decdc1b6d22ac0c512fe4488fb7d9788b4aa34f6a160b09d48829a5" Oct 03 13:18:13 crc kubenswrapper[4868]: I1003 13:18:13.673558 4868 scope.go:117] "RemoveContainer" containerID="84cedba432c2e66e583f58046af47ce7e5662a7cead40e165a55c289cc880a2e" Oct 03 13:18:18 crc kubenswrapper[4868]: I1003 13:18:18.544295 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:18:18 crc kubenswrapper[4868]: E1003 13:18:18.545509 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:18:32 crc kubenswrapper[4868]: I1003 13:18:32.545627 4868 scope.go:117] "RemoveContainer" 
containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:18:32 crc kubenswrapper[4868]: E1003 13:18:32.547038 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:18:45 crc kubenswrapper[4868]: I1003 13:18:45.544233 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:18:45 crc kubenswrapper[4868]: E1003 13:18:45.545267 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:19:00 crc kubenswrapper[4868]: I1003 13:19:00.544836 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:19:00 crc kubenswrapper[4868]: E1003 13:19:00.545866 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:19:15 crc kubenswrapper[4868]: I1003 13:19:15.544491 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:19:15 crc kubenswrapper[4868]: E1003 13:19:15.545331 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:19:21 crc kubenswrapper[4868]: I1003 13:19:21.496225 4868 generic.go:334] "Generic (PLEG): container finished" podID="788a1c65-a9bf-419d-aca4-464a22ece644" containerID="7b7e9aa577bd2515ba6e71f76c74293eac93ea8fef413e7918fc076404733e78" exitCode=0 Oct 03 13:19:21 crc kubenswrapper[4868]: I1003 13:19:21.496318 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" event={"ID":"788a1c65-a9bf-419d-aca4-464a22ece644","Type":"ContainerDied","Data":"7b7e9aa577bd2515ba6e71f76c74293eac93ea8fef413e7918fc076404733e78"} Oct 03 13:19:22 crc kubenswrapper[4868]: I1003 13:19:22.952354 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.081071 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8q2xw\" (UniqueName: \"kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw\") pod \"788a1c65-a9bf-419d-aca4-464a22ece644\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.081492 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory\") pod \"788a1c65-a9bf-419d-aca4-464a22ece644\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.081569 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle\") pod \"788a1c65-a9bf-419d-aca4-464a22ece644\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.081612 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key\") pod \"788a1c65-a9bf-419d-aca4-464a22ece644\" (UID: \"788a1c65-a9bf-419d-aca4-464a22ece644\") " Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.089862 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "788a1c65-a9bf-419d-aca4-464a22ece644" (UID: "788a1c65-a9bf-419d-aca4-464a22ece644"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.090161 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw" (OuterVolumeSpecName: "kube-api-access-8q2xw") pod "788a1c65-a9bf-419d-aca4-464a22ece644" (UID: "788a1c65-a9bf-419d-aca4-464a22ece644"). InnerVolumeSpecName "kube-api-access-8q2xw". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.113999 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "788a1c65-a9bf-419d-aca4-464a22ece644" (UID: "788a1c65-a9bf-419d-aca4-464a22ece644"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.116837 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory" (OuterVolumeSpecName: "inventory") pod "788a1c65-a9bf-419d-aca4-464a22ece644" (UID: "788a1c65-a9bf-419d-aca4-464a22ece644"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.184502 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8q2xw\" (UniqueName: \"kubernetes.io/projected/788a1c65-a9bf-419d-aca4-464a22ece644-kube-api-access-8q2xw\") on node \"crc\" DevicePath \"\"" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.184548 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.184560 4868 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.184570 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/788a1c65-a9bf-419d-aca4-464a22ece644-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.521423 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" event={"ID":"788a1c65-a9bf-419d-aca4-464a22ece644","Type":"ContainerDied","Data":"a7b772dbd17cd4f33b8a4ab71f8fd3a0ac5f32186e6507d0397ed16c56014e11"} Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.521499 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.522155 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a7b772dbd17cd4f33b8a4ab71f8fd3a0ac5f32186e6507d0397ed16c56014e11" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.599201 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"] Oct 03 13:19:23 crc kubenswrapper[4868]: E1003 13:19:23.599728 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="registry-server" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.599745 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="registry-server" Oct 03 13:19:23 crc kubenswrapper[4868]: E1003 13:19:23.599777 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="788a1c65-a9bf-419d-aca4-464a22ece644" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.599788 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="788a1c65-a9bf-419d-aca4-464a22ece644" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Oct 03 13:19:23 crc kubenswrapper[4868]: E1003 13:19:23.599797 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="extract-utilities" Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.599805 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="extract-utilities" Oct 03 13:19:23 crc kubenswrapper[4868]: E1003 13:19:23.599845 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="extract-content" Oct 
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.599852 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="extract-content"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.600126 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="788a1c65-a9bf-419d-aca4-464a22ece644" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.600158 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c556a94-06c8-4b28-b6bc-cf495d3291dc" containerName="registry-server"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.600944 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.603195 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.603236 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.603315 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.603940 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.607946 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"]
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.694447 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.694778 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.695673 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mgmsm\" (UniqueName: \"kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.797214 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.797317 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.797397 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mgmsm\" (UniqueName: \"kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.802498 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.802759 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.813841 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mgmsm\" (UniqueName: \"kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-bph2x\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Oct 03 13:19:23 crc kubenswrapper[4868]: I1003 13:19:23.917625 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" Oct 03 13:19:24 crc kubenswrapper[4868]: I1003 13:19:24.399571 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x"] Oct 03 13:19:24 crc kubenswrapper[4868]: I1003 13:19:24.406438 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:19:24 crc kubenswrapper[4868]: I1003 13:19:24.531682 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" event={"ID":"5ed03626-ece8-4aac-830d-ceef1eb2e5b8","Type":"ContainerStarted","Data":"c5e417b741cdf166e7e75d83a57a0071d3fbcf69bf73d4f0ef6eafd921a19484"} Oct 03 13:19:25 crc kubenswrapper[4868]: I1003 13:19:25.544484 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" event={"ID":"5ed03626-ece8-4aac-830d-ceef1eb2e5b8","Type":"ContainerStarted","Data":"68b49ffc9c619cc05696f3cf7f22f11d7ed39e9623cdcb7be2b65dfb28035165"} Oct 03 13:19:25 crc kubenswrapper[4868]: I1003 13:19:25.566682 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" podStartSLOduration=1.9576204499999998 podStartE2EDuration="2.566664269s" podCreationTimestamp="2025-10-03 13:19:23 +0000 UTC" firstStartedPulling="2025-10-03 13:19:24.406188347 +0000 UTC m=+1760.616037413" lastFinishedPulling="2025-10-03 13:19:25.015232166 +0000 UTC m=+1761.225081232" observedRunningTime="2025-10-03 13:19:25.564530552 +0000 UTC m=+1761.774379618" watchObservedRunningTime="2025-10-03 13:19:25.566664269 +0000 UTC m=+1761.776513335" Oct 03 13:19:30 crc kubenswrapper[4868]: I1003 13:19:30.544759 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:19:30 crc kubenswrapper[4868]: E1003 13:19:30.546030 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.043492 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-pnblv"] Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.053518 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-sw7ll"] Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.064702 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-kvllh"] Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.073149 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-sw7ll"] Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.081132 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-pnblv"] Oct 03 13:19:35 crc kubenswrapper[4868]: I1003 13:19:35.089356 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-kvllh"] Oct 03 13:19:36 crc kubenswrapper[4868]: I1003 13:19:36.557220 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="14d9af9c-75b5-4fa2-a918-abb6814addd7" path="/var/lib/kubelet/pods/14d9af9c-75b5-4fa2-a918-abb6814addd7/volumes" Oct 03 13:19:36 crc kubenswrapper[4868]: I1003 13:19:36.558388 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="80b2d7b9-b29e-44a3-b55e-04e21bf1afd5" path="/var/lib/kubelet/pods/80b2d7b9-b29e-44a3-b55e-04e21bf1afd5/volumes" Oct 03 13:19:36 crc kubenswrapper[4868]: I1003 13:19:36.559303 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f27bd7bc-49d9-4848-ae93-24de591a246b" path="/var/lib/kubelet/pods/f27bd7bc-49d9-4848-ae93-24de591a246b/volumes" Oct 03 13:19:43 crc kubenswrapper[4868]: I1003 13:19:43.544812 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:19:43 crc kubenswrapper[4868]: E1003 13:19:43.545840 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:19:44 crc kubenswrapper[4868]: I1003 13:19:44.031880 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-0b54-account-create-zkj92"] Oct 03 13:19:44 crc kubenswrapper[4868]: I1003 13:19:44.038402 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-0b54-account-create-zkj92"] Oct 03 13:19:44 crc kubenswrapper[4868]: I1003 13:19:44.563011 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e1ef9769-358f-4f0e-850f-3da4ffd5637b" path="/var/lib/kubelet/pods/e1ef9769-358f-4f0e-850f-3da4ffd5637b/volumes" Oct 03 13:19:46 crc kubenswrapper[4868]: I1003 13:19:46.030017 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-73a0-account-create-llxsx"] Oct 03 13:19:46 crc kubenswrapper[4868]: I1003 13:19:46.040005 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-73a0-account-create-llxsx"] Oct 03 13:19:46 crc kubenswrapper[4868]: I1003 13:19:46.555307 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ea96de3-ce21-44c7-8447-9d85172f25e4" path="/var/lib/kubelet/pods/3ea96de3-ce21-44c7-8447-9d85172f25e4/volumes" Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.037111 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-vw4nf"] Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.049724 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-qkmwv"] Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.059593 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-vw4nf"] Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.068293 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-qkmwv"] Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.076702 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-xkslz"] Oct 03 13:19:51 crc kubenswrapper[4868]: I1003 13:19:51.084677 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-xkslz"] Oct 03 13:19:52 crc kubenswrapper[4868]: I1003 13:19:52.557575 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="113709cc-e6ba-4cb5-9eb4-7ddc62f39afe" path="/var/lib/kubelet/pods/113709cc-e6ba-4cb5-9eb4-7ddc62f39afe/volumes" Oct 03 13:19:52 crc kubenswrapper[4868]: I1003 13:19:52.558804 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f54345-335f-4bda-99a4-0e25bcb84c69" path="/var/lib/kubelet/pods/57f54345-335f-4bda-99a4-0e25bcb84c69/volumes" Oct 03 13:19:52 crc kubenswrapper[4868]: I1003 13:19:52.559322 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d01dd09-a4c0-4798-aacf-9f601f0f3502" path="/var/lib/kubelet/pods/7d01dd09-a4c0-4798-aacf-9f601f0f3502/volumes" Oct 03 13:19:54 crc kubenswrapper[4868]: I1003 13:19:54.048997 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-6472-account-create-hg6f7"] Oct 03 13:19:54 crc kubenswrapper[4868]: I1003 13:19:54.058106 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-6472-account-create-hg6f7"] Oct 03 13:19:54 crc kubenswrapper[4868]: I1003 13:19:54.560466 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:19:54 crc kubenswrapper[4868]: I1003 13:19:54.561092 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8c65bc31-bfff-4894-855e-e411deea1819" path="/var/lib/kubelet/pods/8c65bc31-bfff-4894-855e-e411deea1819/volumes" Oct 03 13:19:54 crc kubenswrapper[4868]: E1003 13:19:54.561422 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.031753 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-8f80-account-create-bqgxd"] Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.041366 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3fdd-account-create-sdqv9"] Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.054983 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-f1da-account-create-pmqbc"] Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.063036 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-8f80-account-create-bqgxd"] Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.070643 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3fdd-account-create-sdqv9"] Oct 03 13:20:03 crc kubenswrapper[4868]: I1003 13:20:03.077719 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-f1da-account-create-pmqbc"] Oct 03 13:20:04 crc kubenswrapper[4868]: I1003 13:20:04.558706 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1fb6f069-dc74-4ed1-9c25-848adbceb12e" path="/var/lib/kubelet/pods/1fb6f069-dc74-4ed1-9c25-848adbceb12e/volumes" Oct 03 13:20:04 crc kubenswrapper[4868]: I1003 13:20:04.560819 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="360ccb3f-569b-4679-bb72-149e646914f6" path="/var/lib/kubelet/pods/360ccb3f-569b-4679-bb72-149e646914f6/volumes" Oct 03 13:20:04 crc kubenswrapper[4868]: I1003 13:20:04.561973 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="3c01fe0a-9b35-4ad3-976f-2e516e3b1d55" path="/var/lib/kubelet/pods/3c01fe0a-9b35-4ad3-976f-2e516e3b1d55/volumes" Oct 03 13:20:08 crc kubenswrapper[4868]: I1003 13:20:08.544656 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:20:08 crc kubenswrapper[4868]: E1003 13:20:08.545626 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.743432 4868 scope.go:117] "RemoveContainer" containerID="9c235725f63464c8c72ef58a773f7f874c74d65001a9ef80949b03391743402b" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.785498 4868 scope.go:117] "RemoveContainer" containerID="f63d7b32ca95d3eb702a9fc2e56ed660573604f0034fec42aff53ef7382831a3" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.843851 4868 scope.go:117] "RemoveContainer" containerID="055a546679aad786111f5d77321a9906eadfa8e3e8ca07f4746a96338176408d" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.895884 4868 scope.go:117] "RemoveContainer" containerID="02d223423f833ba2e36f0f7266148a3addd92dab30eccb7a6abe0a3b9b3be0aa" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.941725 4868 scope.go:117] "RemoveContainer" containerID="ff4f32c06c6aa9c7ef6502852abf3909d60d1c5c77bec76895b614e377ada302" Oct 03 13:20:13 crc kubenswrapper[4868]: I1003 13:20:13.984832 4868 scope.go:117] "RemoveContainer" containerID="788a2a71d8590da76a07322415d4fd884637e3a1f2b1542a7244770414c5169d" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.036502 4868 scope.go:117] "RemoveContainer" containerID="fe6d20aff7678167494707934a72931747ede99fcf012e0301450a7c092d5b30" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.060881 4868 scope.go:117] "RemoveContainer" containerID="4270681f5a62c3977527854fb986e54c3412d21903f0c3037684d75ec4b3e5c1" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.088535 4868 scope.go:117] "RemoveContainer" containerID="01820ede449f37ae3b5742b5d25988d42207e4e62a973ab05c7718e1b4ca9869" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.113010 4868 scope.go:117] "RemoveContainer" containerID="0d93addd038ce9b59f9774215bf42b2be184fecf69271bd3bb0de9a86be65256" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.144568 4868 scope.go:117] "RemoveContainer" containerID="0ad2d560b98f1f990748228d9fd8fa1fc2c0ac53476fe39799fd8b004fc2f092" Oct 03 13:20:14 crc kubenswrapper[4868]: I1003 13:20:14.171137 4868 scope.go:117] "RemoveContainer" containerID="676bc9d954942f888b57a60c03aee1c2f170d78fca36a66e22f1f87eb8b1518b" Oct 03 13:20:22 crc kubenswrapper[4868]: I1003 13:20:22.544737 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:20:22 crc kubenswrapper[4868]: E1003 13:20:22.545775 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:20:25 crc kubenswrapper[4868]: I1003 13:20:25.048382 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-2b7zv"] Oct 03 13:20:25 crc kubenswrapper[4868]: I1003 13:20:25.059673 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-2b7zv"] Oct 03 13:20:26 crc kubenswrapper[4868]: I1003 13:20:26.557268 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4b896eb-56e1-4a1e-b78a-7004b5b21556" path="/var/lib/kubelet/pods/b4b896eb-56e1-4a1e-b78a-7004b5b21556/volumes" Oct 03 13:20:36 crc kubenswrapper[4868]: I1003 13:20:36.544853 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:20:36 crc kubenswrapper[4868]: E1003 13:20:36.545816 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:20:47 crc kubenswrapper[4868]: I1003 13:20:47.543926 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:20:47 crc kubenswrapper[4868]: E1003 13:20:47.545833 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:20:58 crc kubenswrapper[4868]: I1003 13:20:58.466460 4868 generic.go:334] "Generic (PLEG): container finished" podID="5ed03626-ece8-4aac-830d-ceef1eb2e5b8" containerID="68b49ffc9c619cc05696f3cf7f22f11d7ed39e9623cdcb7be2b65dfb28035165" exitCode=0 Oct 03 13:20:58 crc kubenswrapper[4868]: I1003 13:20:58.466566 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" event={"ID":"5ed03626-ece8-4aac-830d-ceef1eb2e5b8","Type":"ContainerDied","Data":"68b49ffc9c619cc05696f3cf7f22f11d7ed39e9623cdcb7be2b65dfb28035165"} Oct 03 13:20:59 crc kubenswrapper[4868]: I1003 13:20:59.912408 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" Oct 03 13:20:59 crc kubenswrapper[4868]: I1003 13:20:59.990007 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key\") pod \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " Oct 03 13:20:59 crc kubenswrapper[4868]: I1003 13:20:59.990112 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory\") pod \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " Oct 03 13:20:59 crc kubenswrapper[4868]: I1003 13:20:59.990169 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mgmsm\" (UniqueName: \"kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm\") pod \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\" (UID: \"5ed03626-ece8-4aac-830d-ceef1eb2e5b8\") " Oct 03 13:20:59 crc kubenswrapper[4868]: I1003 13:20:59.997972 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm" (OuterVolumeSpecName: "kube-api-access-mgmsm") pod "5ed03626-ece8-4aac-830d-ceef1eb2e5b8" (UID: "5ed03626-ece8-4aac-830d-ceef1eb2e5b8"). InnerVolumeSpecName "kube-api-access-mgmsm". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.026609 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory" (OuterVolumeSpecName: "inventory") pod "5ed03626-ece8-4aac-830d-ceef1eb2e5b8" (UID: "5ed03626-ece8-4aac-830d-ceef1eb2e5b8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.029617 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5ed03626-ece8-4aac-830d-ceef1eb2e5b8" (UID: "5ed03626-ece8-4aac-830d-ceef1eb2e5b8"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.092236 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.092293 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.092307 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mgmsm\" (UniqueName: \"kubernetes.io/projected/5ed03626-ece8-4aac-830d-ceef1eb2e5b8-kube-api-access-mgmsm\") on node \"crc\" DevicePath \"\"" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.494106 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" event={"ID":"5ed03626-ece8-4aac-830d-ceef1eb2e5b8","Type":"ContainerDied","Data":"c5e417b741cdf166e7e75d83a57a0071d3fbcf69bf73d4f0ef6eafd921a19484"} Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.494146 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c5e417b741cdf166e7e75d83a57a0071d3fbcf69bf73d4f0ef6eafd921a19484" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.494202 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-bph2x" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.551941 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:21:00 crc kubenswrapper[4868]: E1003 13:21:00.552742 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.625862 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb"] Oct 03 13:21:00 crc kubenswrapper[4868]: E1003 13:21:00.626308 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ed03626-ece8-4aac-830d-ceef1eb2e5b8" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.626323 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ed03626-ece8-4aac-830d-ceef1eb2e5b8" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.626509 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ed03626-ece8-4aac-830d-ceef1eb2e5b8" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.627171 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.636561 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.636653 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.636782 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.636970 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.656793 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb"] Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.714439 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.714542 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npbdq\" (UniqueName: \"kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.714606 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.821824 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npbdq\" (UniqueName: \"kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.821934 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.822159 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.829279 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.829769 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.847201 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-npbdq\" (UniqueName: \"kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-98nqb\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:00 crc kubenswrapper[4868]: I1003 13:21:00.970890 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:21:01 crc kubenswrapper[4868]: I1003 13:21:01.360836 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb"] Oct 03 13:21:01 crc kubenswrapper[4868]: I1003 13:21:01.503611 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" event={"ID":"c36e73f9-31f4-474b-9f2d-e88328a747fc","Type":"ContainerStarted","Data":"d18773275bce5d6a186547d78901214866b143d01a2dc6040e0db6aa27031947"} Oct 03 13:21:02 crc kubenswrapper[4868]: I1003 13:21:02.514684 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" event={"ID":"c36e73f9-31f4-474b-9f2d-e88328a747fc","Type":"ContainerStarted","Data":"5880e8a3de5555fbfe25b664ef33f59c54e73a15c8b855fb389f65370f7af6bf"} Oct 03 13:21:02 crc kubenswrapper[4868]: I1003 13:21:02.539304 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" podStartSLOduration=1.785787478 podStartE2EDuration="2.539280061s" podCreationTimestamp="2025-10-03 13:21:00 +0000 UTC" firstStartedPulling="2025-10-03 13:21:01.364329572 +0000 UTC m=+1857.574178638" lastFinishedPulling="2025-10-03 13:21:02.117822155 +0000 UTC m=+1858.327671221" observedRunningTime="2025-10-03 13:21:02.528374828 +0000 UTC m=+1858.738223904" watchObservedRunningTime="2025-10-03 13:21:02.539280061 +0000 UTC m=+1858.749129127" Oct 03 13:21:09 crc kubenswrapper[4868]: I1003 13:21:09.052100 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-pmgh8"] Oct 03 13:21:09 crc kubenswrapper[4868]: I1003 13:21:09.059382 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack/glance-db-sync-pmgh8"] Oct 03 13:21:10 crc kubenswrapper[4868]: I1003 13:21:10.555268 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05b10dd2-0b79-4dfe-9a42-52a392f3cbee" path="/var/lib/kubelet/pods/05b10dd2-0b79-4dfe-9a42-52a392f3cbee/volumes" Oct 03 13:21:12 crc kubenswrapper[4868]: I1003 13:21:12.029979 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-txpvc"] Oct 03 13:21:12 crc kubenswrapper[4868]: I1003 13:21:12.038778 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-txpvc"] Oct 03 13:21:12 crc kubenswrapper[4868]: I1003 13:21:12.557600 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bcabf93f-09c3-4dd6-8ba9-2556afc0c15b" path="/var/lib/kubelet/pods/bcabf93f-09c3-4dd6-8ba9-2556afc0c15b/volumes" Oct 03 13:21:13 crc kubenswrapper[4868]: I1003 13:21:13.544816 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:21:13 crc kubenswrapper[4868]: E1003 13:21:13.545214 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:21:14 crc kubenswrapper[4868]: I1003 13:21:14.452514 4868 scope.go:117] "RemoveContainer" containerID="bd8d2315c652f0a51793e098f994c8c5794adbc6676fa5afcf3d48151a0064c9" Oct 03 13:21:14 crc kubenswrapper[4868]: I1003 13:21:14.505837 4868 scope.go:117] "RemoveContainer" containerID="247ce8911c25647e81a760133a366ebff6a0967cbf8fcffb3f0a9f94e6d770f7" Oct 03 13:21:14 crc kubenswrapper[4868]: I1003 13:21:14.570992 4868 scope.go:117] "RemoveContainer" containerID="4e120149ca6599d9843af7ee57b6762d19c87d6cec68112e5bb9be52d7e922d6" Oct 03 13:21:19 crc kubenswrapper[4868]: I1003 13:21:19.041118 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-v2vbf"] Oct 03 13:21:19 crc kubenswrapper[4868]: I1003 13:21:19.051889 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-v2vbf"] Oct 03 13:21:20 crc kubenswrapper[4868]: I1003 13:21:20.554658 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebf1c326-1c1f-45c6-a9af-f758959b97cf" path="/var/lib/kubelet/pods/ebf1c326-1c1f-45c6-a9af-f758959b97cf/volumes" Oct 03 13:21:25 crc kubenswrapper[4868]: I1003 13:21:25.544081 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:21:25 crc kubenswrapper[4868]: E1003 13:21:25.544763 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:21:31 crc kubenswrapper[4868]: I1003 13:21:31.045420 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-7f6km"] Oct 03 13:21:31 crc kubenswrapper[4868]: I1003 13:21:31.058635 4868 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-7f6km"] Oct 03 13:21:32 crc kubenswrapper[4868]: I1003 13:21:32.558601 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf1f9cd5-4be0-47d6-a72b-46c83aebb53b" path="/var/lib/kubelet/pods/cf1f9cd5-4be0-47d6-a72b-46c83aebb53b/volumes" Oct 03 13:21:35 crc kubenswrapper[4868]: I1003 13:21:35.039955 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-mdrmw"] Oct 03 13:21:35 crc kubenswrapper[4868]: I1003 13:21:35.048990 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-mdrmw"] Oct 03 13:21:36 crc kubenswrapper[4868]: I1003 13:21:36.544103 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:21:36 crc kubenswrapper[4868]: I1003 13:21:36.558781 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff373899-8e15-4a17-a2dc-ae81859fc44e" path="/var/lib/kubelet/pods/ff373899-8e15-4a17-a2dc-ae81859fc44e/volumes" Oct 03 13:21:36 crc kubenswrapper[4868]: I1003 13:21:36.842361 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2"} Oct 03 13:21:48 crc kubenswrapper[4868]: I1003 13:21:48.034364 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-8krw5"] Oct 03 13:21:48 crc kubenswrapper[4868]: I1003 13:21:48.042544 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-8krw5"] Oct 03 13:21:48 crc kubenswrapper[4868]: I1003 13:21:48.556014 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29d1fa62-1c44-47f6-9c4c-c9023d4ef342" path="/var/lib/kubelet/pods/29d1fa62-1c44-47f6-9c4c-c9023d4ef342/volumes" Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.051406 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-vv67p"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.068195 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-2rfdh"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.077100 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-wgb4q"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.084894 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-2rfdh"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.093997 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-vv67p"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.101564 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-wgb4q"] Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.558326 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="014e693d-4546-4fdf-b800-2d4263f9aedf" path="/var/lib/kubelet/pods/014e693d-4546-4fdf-b800-2d4263f9aedf/volumes" Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.560017 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d96bdf5a-e79e-4746-aa96-32043e17fcb4" path="/var/lib/kubelet/pods/d96bdf5a-e79e-4746-aa96-32043e17fcb4/volumes" Oct 03 13:22:10 crc kubenswrapper[4868]: I1003 13:22:10.560671 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod 
volumes dir" podUID="ddf7e94d-f6c1-450a-8ef1-8729c1f078ba" path="/var/lib/kubelet/pods/ddf7e94d-f6c1-450a-8ef1-8729c1f078ba/volumes" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.683091 4868 scope.go:117] "RemoveContainer" containerID="4003895d18873ec4c7e75627e60e8bbd003495f765e71d869bbf905e972a8261" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.724840 4868 scope.go:117] "RemoveContainer" containerID="8163c9a92feaddd362cb2dfff8aa650a81a4d993a162ff1d953fdcb7b94f1995" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.768844 4868 scope.go:117] "RemoveContainer" containerID="06c7173f06c45cf9dbeceb400762c30c5ed8926f9f8546dcbdb240c743893616" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.832712 4868 scope.go:117] "RemoveContainer" containerID="36e74777225a08ea4c362b6de6751b600614c7b59ed6e55d102e121da56490a5" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.872033 4868 scope.go:117] "RemoveContainer" containerID="5fc7d3eab193e67830007fa455e9ab28deddf6084edd6b36056954ab14d726a5" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.934188 4868 scope.go:117] "RemoveContainer" containerID="96e367c82b3c051b5c49a603b1b287a7a91e185d415e2d301909b76b69139628" Oct 03 13:22:14 crc kubenswrapper[4868]: I1003 13:22:14.970619 4868 scope.go:117] "RemoveContainer" containerID="e493069de42bc4c6d847ec08e2b7a4ba442ef271a398b9a66912a2316d7cc0a7" Oct 03 13:22:15 crc kubenswrapper[4868]: I1003 13:22:15.208081 4868 generic.go:334] "Generic (PLEG): container finished" podID="c36e73f9-31f4-474b-9f2d-e88328a747fc" containerID="5880e8a3de5555fbfe25b664ef33f59c54e73a15c8b855fb389f65370f7af6bf" exitCode=0 Oct 03 13:22:15 crc kubenswrapper[4868]: I1003 13:22:15.208158 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" event={"ID":"c36e73f9-31f4-474b-9f2d-e88328a747fc","Type":"ContainerDied","Data":"5880e8a3de5555fbfe25b664ef33f59c54e73a15c8b855fb389f65370f7af6bf"} Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.683850 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.795856 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory\") pod \"c36e73f9-31f4-474b-9f2d-e88328a747fc\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.796058 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key\") pod \"c36e73f9-31f4-474b-9f2d-e88328a747fc\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.796171 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-npbdq\" (UniqueName: \"kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq\") pod \"c36e73f9-31f4-474b-9f2d-e88328a747fc\" (UID: \"c36e73f9-31f4-474b-9f2d-e88328a747fc\") " Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.802472 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq" (OuterVolumeSpecName: "kube-api-access-npbdq") pod "c36e73f9-31f4-474b-9f2d-e88328a747fc" (UID: "c36e73f9-31f4-474b-9f2d-e88328a747fc"). InnerVolumeSpecName "kube-api-access-npbdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.827679 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory" (OuterVolumeSpecName: "inventory") pod "c36e73f9-31f4-474b-9f2d-e88328a747fc" (UID: "c36e73f9-31f4-474b-9f2d-e88328a747fc"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.832718 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c36e73f9-31f4-474b-9f2d-e88328a747fc" (UID: "c36e73f9-31f4-474b-9f2d-e88328a747fc"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.899505 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.899552 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c36e73f9-31f4-474b-9f2d-e88328a747fc-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:16 crc kubenswrapper[4868]: I1003 13:22:16.899563 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-npbdq\" (UniqueName: \"kubernetes.io/projected/c36e73f9-31f4-474b-9f2d-e88328a747fc-kube-api-access-npbdq\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.229448 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" event={"ID":"c36e73f9-31f4-474b-9f2d-e88328a747fc","Type":"ContainerDied","Data":"d18773275bce5d6a186547d78901214866b143d01a2dc6040e0db6aa27031947"} Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.229515 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d18773275bce5d6a186547d78901214866b143d01a2dc6040e0db6aa27031947" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.229596 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-98nqb" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.322361 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq"] Oct 03 13:22:17 crc kubenswrapper[4868]: E1003 13:22:17.332776 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c36e73f9-31f4-474b-9f2d-e88328a747fc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.332855 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="c36e73f9-31f4-474b-9f2d-e88328a747fc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.333242 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="c36e73f9-31f4-474b-9f2d-e88328a747fc" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.334104 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq"] Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.334237 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.339655 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.339916 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.340042 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.340203 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.410173 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t9vxt\" (UniqueName: \"kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.410387 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.410437 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.512579 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t9vxt\" (UniqueName: \"kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.512717 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.512744 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 
crc kubenswrapper[4868]: I1003 13:22:17.518603 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.530640 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.535617 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t9vxt\" (UniqueName: \"kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:17 crc kubenswrapper[4868]: I1003 13:22:17.654741 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:18 crc kubenswrapper[4868]: I1003 13:22:18.210830 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq"] Oct 03 13:22:18 crc kubenswrapper[4868]: W1003 13:22:18.219516 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb3cfb364_3b86_4c7c_bb56_cf85bfd9cde6.slice/crio-b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264 WatchSource:0}: Error finding container b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264: Status 404 returned error can't find the container with id b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264 Oct 03 13:22:18 crc kubenswrapper[4868]: I1003 13:22:18.247441 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" event={"ID":"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6","Type":"ContainerStarted","Data":"b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264"} Oct 03 13:22:19 crc kubenswrapper[4868]: I1003 13:22:19.260781 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" event={"ID":"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6","Type":"ContainerStarted","Data":"96de29625cd6edf7c0eb38a535a8f13df1584deff72ed2066989bed798d97ecf"} Oct 03 13:22:19 crc kubenswrapper[4868]: I1003 13:22:19.280275 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" podStartSLOduration=2.041357266 podStartE2EDuration="2.280247562s" podCreationTimestamp="2025-10-03 13:22:17 +0000 UTC" firstStartedPulling="2025-10-03 13:22:18.222755896 +0000 UTC m=+1934.432604952" lastFinishedPulling="2025-10-03 13:22:18.461646182 +0000 UTC m=+1934.671495248" observedRunningTime="2025-10-03 13:22:19.276482912 +0000 UTC m=+1935.486331978" watchObservedRunningTime="2025-10-03 
13:22:19.280247562 +0000 UTC m=+1935.490096628" Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.038600 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-e702-account-create-6sf9b"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.047003 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-93b9-account-create-x6xqg"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.058249 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-418d-account-create-hrxpx"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.067754 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-e702-account-create-6sf9b"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.075999 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-93b9-account-create-x6xqg"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.083342 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-418d-account-create-hrxpx"] Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.305861 4868 generic.go:334] "Generic (PLEG): container finished" podID="b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" containerID="96de29625cd6edf7c0eb38a535a8f13df1584deff72ed2066989bed798d97ecf" exitCode=0 Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.305923 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" event={"ID":"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6","Type":"ContainerDied","Data":"96de29625cd6edf7c0eb38a535a8f13df1584deff72ed2066989bed798d97ecf"} Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.606567 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2ab9aaef-7f57-4e05-a309-808b35cf821b" path="/var/lib/kubelet/pods/2ab9aaef-7f57-4e05-a309-808b35cf821b/volumes" Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.607937 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a21c034-e84f-4405-ac68-03ddba17adc0" path="/var/lib/kubelet/pods/9a21c034-e84f-4405-ac68-03ddba17adc0/volumes" Oct 03 13:22:24 crc kubenswrapper[4868]: I1003 13:22:24.608697 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd9bdd67-1db6-4099-b277-c751798a6d1b" path="/var/lib/kubelet/pods/dd9bdd67-1db6-4099-b277-c751798a6d1b/volumes" Oct 03 13:22:25 crc kubenswrapper[4868]: I1003 13:22:25.978148 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.091740 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory\") pod \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.091824 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t9vxt\" (UniqueName: \"kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt\") pod \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.092031 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key\") pod \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\" (UID: \"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6\") " Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.099665 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt" (OuterVolumeSpecName: "kube-api-access-t9vxt") pod "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" (UID: "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6"). InnerVolumeSpecName "kube-api-access-t9vxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.165466 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" (UID: "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.195166 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.195214 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t9vxt\" (UniqueName: \"kubernetes.io/projected/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-kube-api-access-t9vxt\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.202239 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory" (OuterVolumeSpecName: "inventory") pod "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" (UID: "b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.297772 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.324421 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" event={"ID":"b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6","Type":"ContainerDied","Data":"b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264"} Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.324462 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b80fc471e503944eb565a7b74126d6a6a56467fa86652e309dc56b4076ee6264" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.324505 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.412079 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll"] Oct 03 13:22:26 crc kubenswrapper[4868]: E1003 13:22:26.412969 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.412996 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.413207 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.413947 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.419791 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.420063 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.420268 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.420552 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.426620 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll"] Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.501415 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.501501 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.501544 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgxql\" (UniqueName: \"kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.603007 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.603094 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgxql\" (UniqueName: \"kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.603223 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: 
\"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.607526 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.608016 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.626943 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tgxql\" (UniqueName: \"kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-bdgll\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:26 crc kubenswrapper[4868]: I1003 13:22:26.732935 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:22:27 crc kubenswrapper[4868]: I1003 13:22:27.265128 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll"] Oct 03 13:22:27 crc kubenswrapper[4868]: I1003 13:22:27.333918 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" event={"ID":"3876c0c5-a630-4bf0-9072-3bcf5889430c","Type":"ContainerStarted","Data":"a65dc43525e335877b9aa20c6e9fd0bfa709f68c8e6a13c780b6cd0ef95c3960"} Oct 03 13:22:30 crc kubenswrapper[4868]: I1003 13:22:30.366868 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" event={"ID":"3876c0c5-a630-4bf0-9072-3bcf5889430c","Type":"ContainerStarted","Data":"04c705874fe28a4954e744dd701c2ef914d2e37bff11a06db96cbf8fd3171028"} Oct 03 13:22:30 crc kubenswrapper[4868]: I1003 13:22:30.385362 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" podStartSLOduration=2.677553725 podStartE2EDuration="4.385332576s" podCreationTimestamp="2025-10-03 13:22:26 +0000 UTC" firstStartedPulling="2025-10-03 13:22:27.281572223 +0000 UTC m=+1943.491421289" lastFinishedPulling="2025-10-03 13:22:28.989351074 +0000 UTC m=+1945.199200140" observedRunningTime="2025-10-03 13:22:30.381990366 +0000 UTC m=+1946.591839442" watchObservedRunningTime="2025-10-03 13:22:30.385332576 +0000 UTC m=+1946.595181642" Oct 03 13:23:03 crc kubenswrapper[4868]: I1003 13:23:03.044917 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fwnkn"] Oct 03 13:23:03 crc kubenswrapper[4868]: I1003 13:23:03.055564 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-fwnkn"] Oct 03 13:23:04 crc kubenswrapper[4868]: I1003 13:23:04.555994 4868 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="476384d7-320b-4e69-81ea-b5193f3c944a" path="/var/lib/kubelet/pods/476384d7-320b-4e69-81ea-b5193f3c944a/volumes" Oct 03 13:23:08 crc kubenswrapper[4868]: I1003 13:23:08.723449 4868 generic.go:334] "Generic (PLEG): container finished" podID="3876c0c5-a630-4bf0-9072-3bcf5889430c" containerID="04c705874fe28a4954e744dd701c2ef914d2e37bff11a06db96cbf8fd3171028" exitCode=0 Oct 03 13:23:08 crc kubenswrapper[4868]: I1003 13:23:08.724199 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" event={"ID":"3876c0c5-a630-4bf0-9072-3bcf5889430c","Type":"ContainerDied","Data":"04c705874fe28a4954e744dd701c2ef914d2e37bff11a06db96cbf8fd3171028"} Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.172575 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.295174 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgxql\" (UniqueName: \"kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql\") pod \"3876c0c5-a630-4bf0-9072-3bcf5889430c\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.295300 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory\") pod \"3876c0c5-a630-4bf0-9072-3bcf5889430c\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.295450 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key\") pod \"3876c0c5-a630-4bf0-9072-3bcf5889430c\" (UID: \"3876c0c5-a630-4bf0-9072-3bcf5889430c\") " Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.301839 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql" (OuterVolumeSpecName: "kube-api-access-tgxql") pod "3876c0c5-a630-4bf0-9072-3bcf5889430c" (UID: "3876c0c5-a630-4bf0-9072-3bcf5889430c"). InnerVolumeSpecName "kube-api-access-tgxql". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.324433 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory" (OuterVolumeSpecName: "inventory") pod "3876c0c5-a630-4bf0-9072-3bcf5889430c" (UID: "3876c0c5-a630-4bf0-9072-3bcf5889430c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.326666 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3876c0c5-a630-4bf0-9072-3bcf5889430c" (UID: "3876c0c5-a630-4bf0-9072-3bcf5889430c"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.398713 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgxql\" (UniqueName: \"kubernetes.io/projected/3876c0c5-a630-4bf0-9072-3bcf5889430c-kube-api-access-tgxql\") on node \"crc\" DevicePath \"\"" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.398761 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.398775 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3876c0c5-a630-4bf0-9072-3bcf5889430c-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.741194 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" event={"ID":"3876c0c5-a630-4bf0-9072-3bcf5889430c","Type":"ContainerDied","Data":"a65dc43525e335877b9aa20c6e9fd0bfa709f68c8e6a13c780b6cd0ef95c3960"} Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.741237 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a65dc43525e335877b9aa20c6e9fd0bfa709f68c8e6a13c780b6cd0ef95c3960" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.741271 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-bdgll" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.833121 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx"] Oct 03 13:23:10 crc kubenswrapper[4868]: E1003 13:23:10.833984 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3876c0c5-a630-4bf0-9072-3bcf5889430c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.834096 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3876c0c5-a630-4bf0-9072-3bcf5889430c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.834323 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="3876c0c5-a630-4bf0-9072-3bcf5889430c" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.836259 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.839447 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.839556 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.839715 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.839827 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:23:10 crc kubenswrapper[4868]: I1003 13:23:10.843587 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx"] Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.009328 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.009398 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnd4n\" (UniqueName: \"kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.009464 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.112027 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.112169 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnd4n\" (UniqueName: \"kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.112662 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" 
(UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.122785 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.123164 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.137703 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnd4n\" (UniqueName: \"kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-65tvx\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.162453 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.700027 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx"] Oct 03 13:23:11 crc kubenswrapper[4868]: I1003 13:23:11.755349 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" event={"ID":"8b59ed35-42bc-4fad-ad96-28152d3234cd","Type":"ContainerStarted","Data":"f4bb95da90c353b8d4980ce21fe482baa7cf4a68be6e43943b74c61fe97a41de"} Oct 03 13:23:12 crc kubenswrapper[4868]: I1003 13:23:12.765685 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" event={"ID":"8b59ed35-42bc-4fad-ad96-28152d3234cd","Type":"ContainerStarted","Data":"9bba79528c83230bcd24edf0b732b453648512e5aa766e3756cfc928067bad6c"} Oct 03 13:23:12 crc kubenswrapper[4868]: I1003 13:23:12.786760 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" podStartSLOduration=2.5531992839999997 podStartE2EDuration="2.786742127s" podCreationTimestamp="2025-10-03 13:23:10 +0000 UTC" firstStartedPulling="2025-10-03 13:23:11.702474829 +0000 UTC m=+1987.912323895" lastFinishedPulling="2025-10-03 13:23:11.936017682 +0000 UTC m=+1988.145866738" observedRunningTime="2025-10-03 13:23:12.778960788 +0000 UTC m=+1988.988809854" watchObservedRunningTime="2025-10-03 13:23:12.786742127 +0000 UTC m=+1988.996591193" Oct 03 13:23:15 crc kubenswrapper[4868]: I1003 13:23:15.155252 4868 scope.go:117] "RemoveContainer" containerID="a84de38853047c2c0293b71c0de505d17e471f9c2b65068b0daad3f50e291fce" Oct 03 13:23:15 crc kubenswrapper[4868]: I1003 13:23:15.178494 4868 scope.go:117] "RemoveContainer" containerID="f8643a2cdc5cd948282c56165245bf88d8d3cb4611968c8fa99a836c64542588" Oct 03 13:23:15 crc kubenswrapper[4868]: I1003 
13:23:15.244073 4868 scope.go:117] "RemoveContainer" containerID="d33c69860a250dd64929097216750813df2f25f502ae28b5d02c2f11dd33c1b3" Oct 03 13:23:15 crc kubenswrapper[4868]: I1003 13:23:15.266154 4868 scope.go:117] "RemoveContainer" containerID="67e89954d0d2edc51b22d8ccedd8361ae125c5ecb1f2fbf1be891f048c8a8abd" Oct 03 13:23:29 crc kubenswrapper[4868]: I1003 13:23:29.043800 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-89vph"] Oct 03 13:23:29 crc kubenswrapper[4868]: I1003 13:23:29.051363 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-89vph"] Oct 03 13:23:30 crc kubenswrapper[4868]: I1003 13:23:30.565188 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6be42dc6-1220-4751-8201-dbcf019309ce" path="/var/lib/kubelet/pods/6be42dc6-1220-4751-8201-dbcf019309ce/volumes" Oct 03 13:23:31 crc kubenswrapper[4868]: I1003 13:23:31.033411 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7km5p"] Oct 03 13:23:31 crc kubenswrapper[4868]: I1003 13:23:31.042114 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-7km5p"] Oct 03 13:23:32 crc kubenswrapper[4868]: I1003 13:23:32.555783 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52fc4aac-675f-4824-9a11-e41a71de1c88" path="/var/lib/kubelet/pods/52fc4aac-675f-4824-9a11-e41a71de1c88/volumes" Oct 03 13:24:02 crc kubenswrapper[4868]: I1003 13:24:02.145743 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:24:02 crc kubenswrapper[4868]: I1003 13:24:02.146658 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:24:09 crc kubenswrapper[4868]: I1003 13:24:09.304215 4868 generic.go:334] "Generic (PLEG): container finished" podID="8b59ed35-42bc-4fad-ad96-28152d3234cd" containerID="9bba79528c83230bcd24edf0b732b453648512e5aa766e3756cfc928067bad6c" exitCode=2 Oct 03 13:24:09 crc kubenswrapper[4868]: I1003 13:24:09.304324 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" event={"ID":"8b59ed35-42bc-4fad-ad96-28152d3234cd","Type":"ContainerDied","Data":"9bba79528c83230bcd24edf0b732b453648512e5aa766e3756cfc928067bad6c"} Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.717564 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.809881 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key\") pod \"8b59ed35-42bc-4fad-ad96-28152d3234cd\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.810003 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xnd4n\" (UniqueName: \"kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n\") pod \"8b59ed35-42bc-4fad-ad96-28152d3234cd\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.810115 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory\") pod \"8b59ed35-42bc-4fad-ad96-28152d3234cd\" (UID: \"8b59ed35-42bc-4fad-ad96-28152d3234cd\") " Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.815923 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n" (OuterVolumeSpecName: "kube-api-access-xnd4n") pod "8b59ed35-42bc-4fad-ad96-28152d3234cd" (UID: "8b59ed35-42bc-4fad-ad96-28152d3234cd"). InnerVolumeSpecName "kube-api-access-xnd4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.837833 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory" (OuterVolumeSpecName: "inventory") pod "8b59ed35-42bc-4fad-ad96-28152d3234cd" (UID: "8b59ed35-42bc-4fad-ad96-28152d3234cd"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.845751 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8b59ed35-42bc-4fad-ad96-28152d3234cd" (UID: "8b59ed35-42bc-4fad-ad96-28152d3234cd"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.912448 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.912486 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xnd4n\" (UniqueName: \"kubernetes.io/projected/8b59ed35-42bc-4fad-ad96-28152d3234cd-kube-api-access-xnd4n\") on node \"crc\" DevicePath \"\"" Oct 03 13:24:10 crc kubenswrapper[4868]: I1003 13:24:10.912509 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8b59ed35-42bc-4fad-ad96-28152d3234cd-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:24:11 crc kubenswrapper[4868]: I1003 13:24:11.326553 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" event={"ID":"8b59ed35-42bc-4fad-ad96-28152d3234cd","Type":"ContainerDied","Data":"f4bb95da90c353b8d4980ce21fe482baa7cf4a68be6e43943b74c61fe97a41de"} Oct 03 13:24:11 crc kubenswrapper[4868]: I1003 13:24:11.326589 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-65tvx" Oct 03 13:24:11 crc kubenswrapper[4868]: I1003 13:24:11.326608 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f4bb95da90c353b8d4980ce21fe482baa7cf4a68be6e43943b74c61fe97a41de" Oct 03 13:24:14 crc kubenswrapper[4868]: I1003 13:24:14.044106 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-n75ck"] Oct 03 13:24:14 crc kubenswrapper[4868]: I1003 13:24:14.050982 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-n75ck"] Oct 03 13:24:14 crc kubenswrapper[4868]: I1003 13:24:14.557021 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70a93443-6a9b-42e8-8c7d-c8df6560e009" path="/var/lib/kubelet/pods/70a93443-6a9b-42e8-8c7d-c8df6560e009/volumes" Oct 03 13:24:15 crc kubenswrapper[4868]: I1003 13:24:15.382490 4868 scope.go:117] "RemoveContainer" containerID="697b9bff66083546f15f478a2f1e57a3755b1b658cdc7319a0a6f9e48b1e2458" Oct 03 13:24:15 crc kubenswrapper[4868]: I1003 13:24:15.437833 4868 scope.go:117] "RemoveContainer" containerID="6d9c47d21a1dfc03f815bcc1923aa1d91e62d31cfcee13fee16f2b0f6c86109b" Oct 03 13:24:15 crc kubenswrapper[4868]: I1003 13:24:15.495013 4868 scope.go:117] "RemoveContainer" containerID="216306e42fbd12051befacf7602b05e22d18cec292b4bc8dbd03af040c3a212d" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.028578 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l"] Oct 03 13:24:19 crc kubenswrapper[4868]: E1003 13:24:19.029399 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8b59ed35-42bc-4fad-ad96-28152d3234cd" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.029415 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8b59ed35-42bc-4fad-ad96-28152d3234cd" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.029627 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="8b59ed35-42bc-4fad-ad96-28152d3234cd" 
containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.030329 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.032125 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.032385 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.032860 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.033330 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.045160 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l"] Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.190308 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.190388 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.190426 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scb9f\" (UniqueName: \"kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.292527 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.292608 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scb9f\" (UniqueName: \"kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.292767 4868 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.299355 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.299665 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.309533 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scb9f\" (UniqueName: \"kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.360758 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:24:19 crc kubenswrapper[4868]: I1003 13:24:19.900018 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l"] Oct 03 13:24:20 crc kubenswrapper[4868]: I1003 13:24:20.430796 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" event={"ID":"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7","Type":"ContainerStarted","Data":"cb7817e6de141a55cb8dcf6b0ccaef7cd9efa3d6690a518910e7c5d671a76c22"} Oct 03 13:24:20 crc kubenswrapper[4868]: I1003 13:24:20.431268 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" event={"ID":"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7","Type":"ContainerStarted","Data":"0ebc20577eb0f1041f845462c60f048011ca9ebed7222051e613171f159e822e"} Oct 03 13:24:20 crc kubenswrapper[4868]: I1003 13:24:20.455681 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" podStartSLOduration=1.273573662 podStartE2EDuration="1.45565591s" podCreationTimestamp="2025-10-03 13:24:19 +0000 UTC" firstStartedPulling="2025-10-03 13:24:19.925110668 +0000 UTC m=+2056.134959744" lastFinishedPulling="2025-10-03 13:24:20.107192926 +0000 UTC m=+2056.317041992" observedRunningTime="2025-10-03 13:24:20.449729181 +0000 UTC m=+2056.659578247" watchObservedRunningTime="2025-10-03 13:24:20.45565591 +0000 UTC m=+2056.665504976" Oct 03 13:24:32 crc kubenswrapper[4868]: I1003 13:24:32.145587 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: 
Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:24:32 crc kubenswrapper[4868]: I1003 13:24:32.146192 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.100919 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.103808 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.116503 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.247312 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.247356 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.247551 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45jmp\" (UniqueName: \"kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.349857 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45jmp\" (UniqueName: \"kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.350410 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.350478 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.350997 4868 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.351293 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.373613 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45jmp\" (UniqueName: \"kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp\") pod \"redhat-operators-9t8jp\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.427877 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:41 crc kubenswrapper[4868]: I1003 13:24:41.932046 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:24:42 crc kubenswrapper[4868]: I1003 13:24:42.652030 4868 generic.go:334] "Generic (PLEG): container finished" podID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerID="f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268" exitCode=0 Oct 03 13:24:42 crc kubenswrapper[4868]: I1003 13:24:42.652366 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerDied","Data":"f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268"} Oct 03 13:24:42 crc kubenswrapper[4868]: I1003 13:24:42.652761 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerStarted","Data":"59b307547428a8c1449636a82d34a841b307a634f61af54fd5895faaeec33836"} Oct 03 13:24:42 crc kubenswrapper[4868]: I1003 13:24:42.655772 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:24:44 crc kubenswrapper[4868]: E1003 13:24:44.237875 4868 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod51529561_710c_4b1d_ba79_b9d7b8b5ae07.slice/crio-conmon-fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583.scope\": RecentStats: unable to find data in memory cache]" Oct 03 13:24:44 crc kubenswrapper[4868]: I1003 13:24:44.671878 4868 generic.go:334] "Generic (PLEG): container finished" podID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerID="fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583" exitCode=0 Oct 03 13:24:44 crc kubenswrapper[4868]: I1003 13:24:44.671939 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerDied","Data":"fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583"} Oct 03 13:24:47 crc kubenswrapper[4868]: I1003 
13:24:47.700241 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerStarted","Data":"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3"} Oct 03 13:24:47 crc kubenswrapper[4868]: I1003 13:24:47.726134 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9t8jp" podStartSLOduration=2.425514293 podStartE2EDuration="6.726110221s" podCreationTimestamp="2025-10-03 13:24:41 +0000 UTC" firstStartedPulling="2025-10-03 13:24:42.6555124 +0000 UTC m=+2078.865361466" lastFinishedPulling="2025-10-03 13:24:46.956108328 +0000 UTC m=+2083.165957394" observedRunningTime="2025-10-03 13:24:47.718995539 +0000 UTC m=+2083.928844605" watchObservedRunningTime="2025-10-03 13:24:47.726110221 +0000 UTC m=+2083.935959297" Oct 03 13:24:51 crc kubenswrapper[4868]: I1003 13:24:51.428266 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:51 crc kubenswrapper[4868]: I1003 13:24:51.428770 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:24:51 crc kubenswrapper[4868]: I1003 13:24:51.494865 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:25:01 crc kubenswrapper[4868]: I1003 13:25:01.481448 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:25:01 crc kubenswrapper[4868]: I1003 13:25:01.542139 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:25:01 crc kubenswrapper[4868]: I1003 13:25:01.839376 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9t8jp" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="registry-server" containerID="cri-o://4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3" gracePeriod=2 Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.145551 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.145684 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.146207 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.148427 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness 
probe, will be restarted" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.148562 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2" gracePeriod=600 Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.337335 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.521591 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45jmp\" (UniqueName: \"kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp\") pod \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.523779 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content\") pod \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.524140 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities\") pod \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\" (UID: \"51529561-710c-4b1d-ba79-b9d7b8b5ae07\") " Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.525228 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities" (OuterVolumeSpecName: "utilities") pod "51529561-710c-4b1d-ba79-b9d7b8b5ae07" (UID: "51529561-710c-4b1d-ba79-b9d7b8b5ae07"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.530479 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp" (OuterVolumeSpecName: "kube-api-access-45jmp") pod "51529561-710c-4b1d-ba79-b9d7b8b5ae07" (UID: "51529561-710c-4b1d-ba79-b9d7b8b5ae07"). InnerVolumeSpecName "kube-api-access-45jmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.602646 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "51529561-710c-4b1d-ba79-b9d7b8b5ae07" (UID: "51529561-710c-4b1d-ba79-b9d7b8b5ae07"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.628793 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45jmp\" (UniqueName: \"kubernetes.io/projected/51529561-710c-4b1d-ba79-b9d7b8b5ae07-kube-api-access-45jmp\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.628841 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.628855 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/51529561-710c-4b1d-ba79-b9d7b8b5ae07-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.850617 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2" exitCode=0 Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.850836 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2"} Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.851505 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"} Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.851621 4868 scope.go:117] "RemoveContainer" containerID="dfccaaf31533a5429a264126946e9825813ee3cb38e379311485038673a29c31" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.858348 4868 generic.go:334] "Generic (PLEG): container finished" podID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerID="4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3" exitCode=0 Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.858411 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerDied","Data":"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3"} Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.858448 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9t8jp" event={"ID":"51529561-710c-4b1d-ba79-b9d7b8b5ae07","Type":"ContainerDied","Data":"59b307547428a8c1449636a82d34a841b307a634f61af54fd5895faaeec33836"} Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.858513 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9t8jp" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.899729 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.903600 4868 scope.go:117] "RemoveContainer" containerID="4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.907527 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9t8jp"] Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.928509 4868 scope.go:117] "RemoveContainer" containerID="fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583" Oct 03 13:25:02 crc kubenswrapper[4868]: I1003 13:25:02.957721 4868 scope.go:117] "RemoveContainer" containerID="f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.018397 4868 scope.go:117] "RemoveContainer" containerID="4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3" Oct 03 13:25:03 crc kubenswrapper[4868]: E1003 13:25:03.019104 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3\": container with ID starting with 4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3 not found: ID does not exist" containerID="4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.019159 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3"} err="failed to get container status \"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3\": rpc error: code = NotFound desc = could not find container \"4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3\": container with ID starting with 4db9052727f12a6e587dc67c140c431d3db1f31b9dca35ee4189068279865ba3 not found: ID does not exist" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.019193 4868 scope.go:117] "RemoveContainer" containerID="fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583" Oct 03 13:25:03 crc kubenswrapper[4868]: E1003 13:25:03.019660 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583\": container with ID starting with fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583 not found: ID does not exist" containerID="fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.019718 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583"} err="failed to get container status \"fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583\": rpc error: code = NotFound desc = could not find container \"fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583\": container with ID starting with fa0757cf2ee14cd6d1cd019d40e4ef016bc661283520a83e0602d965cc7f9583 not found: ID does not exist" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.019750 4868 scope.go:117] "RemoveContainer" 
containerID="f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268" Oct 03 13:25:03 crc kubenswrapper[4868]: E1003 13:25:03.021166 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268\": container with ID starting with f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268 not found: ID does not exist" containerID="f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268" Oct 03 13:25:03 crc kubenswrapper[4868]: I1003 13:25:03.021202 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268"} err="failed to get container status \"f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268\": rpc error: code = NotFound desc = could not find container \"f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268\": container with ID starting with f6059ae664df2662ff49f8b6f008fb1a0f8a062b728e7bdb349d1c401763e268 not found: ID does not exist" Oct 03 13:25:04 crc kubenswrapper[4868]: I1003 13:25:04.587901 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" path="/var/lib/kubelet/pods/51529561-710c-4b1d-ba79-b9d7b8b5ae07/volumes" Oct 03 13:25:06 crc kubenswrapper[4868]: I1003 13:25:06.919872 4868 generic.go:334] "Generic (PLEG): container finished" podID="afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" containerID="cb7817e6de141a55cb8dcf6b0ccaef7cd9efa3d6690a518910e7c5d671a76c22" exitCode=0 Oct 03 13:25:06 crc kubenswrapper[4868]: I1003 13:25:06.919967 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" event={"ID":"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7","Type":"ContainerDied","Data":"cb7817e6de141a55cb8dcf6b0ccaef7cd9efa3d6690a518910e7c5d671a76c22"} Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.384428 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.464481 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scb9f\" (UniqueName: \"kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f\") pod \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.464530 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key\") pod \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.464636 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory\") pod \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\" (UID: \"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7\") " Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.470996 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f" (OuterVolumeSpecName: "kube-api-access-scb9f") pod "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" (UID: "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7"). InnerVolumeSpecName "kube-api-access-scb9f". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.494302 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory" (OuterVolumeSpecName: "inventory") pod "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" (UID: "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.494356 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" (UID: "afe26f8a-45f7-4f06-b6ef-3584fe8b51a7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.568297 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.568344 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scb9f\" (UniqueName: \"kubernetes.io/projected/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-kube-api-access-scb9f\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.568361 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/afe26f8a-45f7-4f06-b6ef-3584fe8b51a7-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.940915 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" event={"ID":"afe26f8a-45f7-4f06-b6ef-3584fe8b51a7","Type":"ContainerDied","Data":"0ebc20577eb0f1041f845462c60f048011ca9ebed7222051e613171f159e822e"} Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.941465 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0ebc20577eb0f1041f845462c60f048011ca9ebed7222051e613171f159e822e" Oct 03 13:25:08 crc kubenswrapper[4868]: I1003 13:25:08.941105 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034204 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-tgkrx"] Oct 03 13:25:09 crc kubenswrapper[4868]: E1003 13:25:09.034600 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="extract-content" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034615 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="extract-content" Oct 03 13:25:09 crc kubenswrapper[4868]: E1003 13:25:09.034648 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034659 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:09 crc kubenswrapper[4868]: E1003 13:25:09.034674 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="extract-utilities" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034682 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="extract-utilities" Oct 03 13:25:09 crc kubenswrapper[4868]: E1003 13:25:09.034706 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="registry-server" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034713 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="registry-server" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034919 4868 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="51529561-710c-4b1d-ba79-b9d7b8b5ae07" containerName="registry-server" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.034941 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="afe26f8a-45f7-4f06-b6ef-3584fe8b51a7" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.035772 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.040924 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.041301 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.041448 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.041945 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.050368 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-tgkrx"] Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.182229 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wphlb\" (UniqueName: \"kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.182294 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.182405 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.284758 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wphlb\" (UniqueName: \"kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.284818 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " 
pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.284863 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.290525 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.291009 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.303940 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wphlb\" (UniqueName: \"kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb\") pod \"ssh-known-hosts-edpm-deployment-tgkrx\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.353624 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:09 crc kubenswrapper[4868]: I1003 13:25:09.952216 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-tgkrx"] Oct 03 13:25:10 crc kubenswrapper[4868]: I1003 13:25:10.961600 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" event={"ID":"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4","Type":"ContainerStarted","Data":"491a7af7152d2bc22c5fd18fb80c7e633dda574b9c7bff9684a0c9c24ac63f06"} Oct 03 13:25:10 crc kubenswrapper[4868]: I1003 13:25:10.962367 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" event={"ID":"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4","Type":"ContainerStarted","Data":"7f126e58a341f245bd9f748c19b7c771db316051260a38bc4ffde807b875fe67"} Oct 03 13:25:17 crc kubenswrapper[4868]: I1003 13:25:17.018894 4868 generic.go:334] "Generic (PLEG): container finished" podID="ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" containerID="491a7af7152d2bc22c5fd18fb80c7e633dda574b9c7bff9684a0c9c24ac63f06" exitCode=0 Oct 03 13:25:17 crc kubenswrapper[4868]: I1003 13:25:17.018978 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" event={"ID":"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4","Type":"ContainerDied","Data":"491a7af7152d2bc22c5fd18fb80c7e633dda574b9c7bff9684a0c9c24ac63f06"} Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.427151 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.577433 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam\") pod \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.577615 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0\") pod \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.577902 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wphlb\" (UniqueName: \"kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb\") pod \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\" (UID: \"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4\") " Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.585531 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb" (OuterVolumeSpecName: "kube-api-access-wphlb") pod "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" (UID: "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4"). InnerVolumeSpecName "kube-api-access-wphlb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.607142 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" (UID: "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.609741 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" (UID: "ddfbf7b2-102d-4874-8b6e-d322d5eaabc4"). InnerVolumeSpecName "inventory-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.680325 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wphlb\" (UniqueName: \"kubernetes.io/projected/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-kube-api-access-wphlb\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.680365 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:18 crc kubenswrapper[4868]: I1003 13:25:18.680375 4868 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/ddfbf7b2-102d-4874-8b6e-d322d5eaabc4-inventory-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.039795 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" event={"ID":"ddfbf7b2-102d-4874-8b6e-d322d5eaabc4","Type":"ContainerDied","Data":"7f126e58a341f245bd9f748c19b7c771db316051260a38bc4ffde807b875fe67"} Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.039843 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f126e58a341f245bd9f748c19b7c771db316051260a38bc4ffde807b875fe67" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.039873 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-tgkrx" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.165837 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh"] Oct 03 13:25:19 crc kubenswrapper[4868]: E1003 13:25:19.166578 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" containerName="ssh-known-hosts-edpm-deployment" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.166600 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" containerName="ssh-known-hosts-edpm-deployment" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.166803 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ddfbf7b2-102d-4874-8b6e-d322d5eaabc4" containerName="ssh-known-hosts-edpm-deployment" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.167603 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.170217 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.170476 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.170979 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.171166 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.190923 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh"] Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.290910 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.291198 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.291285 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hcp8c\" (UniqueName: \"kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.393795 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.393850 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hcp8c\" (UniqueName: \"kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.393949 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.399740 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.400875 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.411582 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hcp8c\" (UniqueName: \"kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-9hdjh\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:19 crc kubenswrapper[4868]: I1003 13:25:19.495088 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:20 crc kubenswrapper[4868]: I1003 13:25:20.015495 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh"] Oct 03 13:25:20 crc kubenswrapper[4868]: I1003 13:25:20.048712 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" event={"ID":"ea96ad6b-3737-4dab-849e-b633d1ecc135","Type":"ContainerStarted","Data":"949fa1f3aebaa2f14cb42aba2105ef0e2e1858e72993b3052e5eb6632300bf58"} Oct 03 13:25:21 crc kubenswrapper[4868]: I1003 13:25:21.058758 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" event={"ID":"ea96ad6b-3737-4dab-849e-b633d1ecc135","Type":"ContainerStarted","Data":"be5fbea20766772ebd87b02336e1808a56cfb0594198e64cc5d076709c0d4d90"} Oct 03 13:25:21 crc kubenswrapper[4868]: I1003 13:25:21.078487 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" podStartSLOduration=1.918642218 podStartE2EDuration="2.078466748s" podCreationTimestamp="2025-10-03 13:25:19 +0000 UTC" firstStartedPulling="2025-10-03 13:25:20.016157011 +0000 UTC m=+2116.226006077" lastFinishedPulling="2025-10-03 13:25:20.175981541 +0000 UTC m=+2116.385830607" observedRunningTime="2025-10-03 13:25:21.07296784 +0000 UTC m=+2117.282816906" watchObservedRunningTime="2025-10-03 13:25:21.078466748 +0000 UTC m=+2117.288315834" Oct 03 13:25:29 crc kubenswrapper[4868]: I1003 13:25:29.149904 4868 generic.go:334] "Generic (PLEG): container finished" podID="ea96ad6b-3737-4dab-849e-b633d1ecc135" containerID="be5fbea20766772ebd87b02336e1808a56cfb0594198e64cc5d076709c0d4d90" exitCode=0 Oct 03 13:25:29 crc kubenswrapper[4868]: I1003 13:25:29.150029 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" 
event={"ID":"ea96ad6b-3737-4dab-849e-b633d1ecc135","Type":"ContainerDied","Data":"be5fbea20766772ebd87b02336e1808a56cfb0594198e64cc5d076709c0d4d90"} Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.592659 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.744327 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory\") pod \"ea96ad6b-3737-4dab-849e-b633d1ecc135\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.744418 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key\") pod \"ea96ad6b-3737-4dab-849e-b633d1ecc135\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.744478 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hcp8c\" (UniqueName: \"kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c\") pod \"ea96ad6b-3737-4dab-849e-b633d1ecc135\" (UID: \"ea96ad6b-3737-4dab-849e-b633d1ecc135\") " Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.750234 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c" (OuterVolumeSpecName: "kube-api-access-hcp8c") pod "ea96ad6b-3737-4dab-849e-b633d1ecc135" (UID: "ea96ad6b-3737-4dab-849e-b633d1ecc135"). InnerVolumeSpecName "kube-api-access-hcp8c". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.776560 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "ea96ad6b-3737-4dab-849e-b633d1ecc135" (UID: "ea96ad6b-3737-4dab-849e-b633d1ecc135"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.779913 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory" (OuterVolumeSpecName: "inventory") pod "ea96ad6b-3737-4dab-849e-b633d1ecc135" (UID: "ea96ad6b-3737-4dab-849e-b633d1ecc135"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.848514 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.848582 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hcp8c\" (UniqueName: \"kubernetes.io/projected/ea96ad6b-3737-4dab-849e-b633d1ecc135-kube-api-access-hcp8c\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:30 crc kubenswrapper[4868]: I1003 13:25:30.848599 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/ea96ad6b-3737-4dab-849e-b633d1ecc135-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.170622 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" event={"ID":"ea96ad6b-3737-4dab-849e-b633d1ecc135","Type":"ContainerDied","Data":"949fa1f3aebaa2f14cb42aba2105ef0e2e1858e72993b3052e5eb6632300bf58"} Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.170673 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="949fa1f3aebaa2f14cb42aba2105ef0e2e1858e72993b3052e5eb6632300bf58" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.170676 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-9hdjh" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.252556 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9"] Oct 03 13:25:31 crc kubenswrapper[4868]: E1003 13:25:31.253369 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea96ad6b-3737-4dab-849e-b633d1ecc135" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.253401 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea96ad6b-3737-4dab-849e-b633d1ecc135" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.253720 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea96ad6b-3737-4dab-849e-b633d1ecc135" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.255299 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.256118 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.256311 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvsmd\" (UniqueName: \"kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.256410 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.259190 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.260811 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.261328 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.261357 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.263673 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9"] Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.357996 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.358120 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.358179 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvsmd\" (UniqueName: \"kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: 
\"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.362488 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.367714 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.382153 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvsmd\" (UniqueName: \"kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:31 crc kubenswrapper[4868]: I1003 13:25:31.571559 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:32 crc kubenswrapper[4868]: I1003 13:25:32.075002 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9"] Oct 03 13:25:32 crc kubenswrapper[4868]: W1003 13:25:32.079960 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5ee021a5_0c14_460b_afdb_5b73f394355d.slice/crio-4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf WatchSource:0}: Error finding container 4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf: Status 404 returned error can't find the container with id 4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf Oct 03 13:25:32 crc kubenswrapper[4868]: I1003 13:25:32.180334 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" event={"ID":"5ee021a5-0c14-460b-afdb-5b73f394355d","Type":"ContainerStarted","Data":"4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf"} Oct 03 13:25:33 crc kubenswrapper[4868]: I1003 13:25:33.190791 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" event={"ID":"5ee021a5-0c14-460b-afdb-5b73f394355d","Type":"ContainerStarted","Data":"8dad64f6b7a99ec6f4f6327c3f1c7bcb30316b039af1afd15e99d8ff2331737b"} Oct 03 13:25:33 crc kubenswrapper[4868]: I1003 13:25:33.215815 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" podStartSLOduration=2.047994264 podStartE2EDuration="2.215773677s" podCreationTimestamp="2025-10-03 13:25:31 +0000 UTC" firstStartedPulling="2025-10-03 13:25:32.082372057 +0000 UTC m=+2128.292221123" lastFinishedPulling="2025-10-03 13:25:32.25015147 +0000 UTC m=+2128.460000536" observedRunningTime="2025-10-03 13:25:33.210682549 +0000 UTC m=+2129.420531635" 
watchObservedRunningTime="2025-10-03 13:25:33.215773677 +0000 UTC m=+2129.425622743" Oct 03 13:25:42 crc kubenswrapper[4868]: I1003 13:25:42.268831 4868 generic.go:334] "Generic (PLEG): container finished" podID="5ee021a5-0c14-460b-afdb-5b73f394355d" containerID="8dad64f6b7a99ec6f4f6327c3f1c7bcb30316b039af1afd15e99d8ff2331737b" exitCode=0 Oct 03 13:25:42 crc kubenswrapper[4868]: I1003 13:25:42.268933 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" event={"ID":"5ee021a5-0c14-460b-afdb-5b73f394355d","Type":"ContainerDied","Data":"8dad64f6b7a99ec6f4f6327c3f1c7bcb30316b039af1afd15e99d8ff2331737b"} Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.720673 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.908902 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key\") pod \"5ee021a5-0c14-460b-afdb-5b73f394355d\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.909030 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mvsmd\" (UniqueName: \"kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd\") pod \"5ee021a5-0c14-460b-afdb-5b73f394355d\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.909237 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory\") pod \"5ee021a5-0c14-460b-afdb-5b73f394355d\" (UID: \"5ee021a5-0c14-460b-afdb-5b73f394355d\") " Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.915838 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd" (OuterVolumeSpecName: "kube-api-access-mvsmd") pod "5ee021a5-0c14-460b-afdb-5b73f394355d" (UID: "5ee021a5-0c14-460b-afdb-5b73f394355d"). InnerVolumeSpecName "kube-api-access-mvsmd". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.938828 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5ee021a5-0c14-460b-afdb-5b73f394355d" (UID: "5ee021a5-0c14-460b-afdb-5b73f394355d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:43 crc kubenswrapper[4868]: I1003 13:25:43.939271 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory" (OuterVolumeSpecName: "inventory") pod "5ee021a5-0c14-460b-afdb-5b73f394355d" (UID: "5ee021a5-0c14-460b-afdb-5b73f394355d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.012034 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mvsmd\" (UniqueName: \"kubernetes.io/projected/5ee021a5-0c14-460b-afdb-5b73f394355d-kube-api-access-mvsmd\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.013944 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.014080 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5ee021a5-0c14-460b-afdb-5b73f394355d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.320791 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" event={"ID":"5ee021a5-0c14-460b-afdb-5b73f394355d","Type":"ContainerDied","Data":"4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf"} Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.320850 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f1bfee4a1b3a2a3154b4c1f4b20bd2e6d2b0443f01f0e720221efea20f6a6bf" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.320976 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.405702 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl"] Oct 03 13:25:44 crc kubenswrapper[4868]: E1003 13:25:44.406783 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ee021a5-0c14-460b-afdb-5b73f394355d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.406811 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ee021a5-0c14-460b-afdb-5b73f394355d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.407039 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ee021a5-0c14-460b-afdb-5b73f394355d" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.407962 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.410618 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.411134 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.411197 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.411385 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.411462 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.411653 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.414249 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.414428 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.437978 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438051 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbxn9\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438121 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438151 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438224 4868 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438257 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438309 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438344 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438375 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438488 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438528 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438562 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438645 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.438678 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.440033 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl"] Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.540934 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.540994 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541030 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541061 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbxn9\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541110 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541141 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541174 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541202 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541250 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541278 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541301 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541376 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" 
(UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541403 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.541435 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.548010 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.549324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.549510 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.549573 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.549526 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.549865 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.550328 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.550709 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.550990 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.551847 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.553984 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.555425 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.562027 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbxn9\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc 
kubenswrapper[4868]: I1003 13:25:44.563501 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:44 crc kubenswrapper[4868]: I1003 13:25:44.731232 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:25:45 crc kubenswrapper[4868]: I1003 13:25:45.286278 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl"] Oct 03 13:25:45 crc kubenswrapper[4868]: I1003 13:25:45.329722 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" event={"ID":"831c7a15-2c7a-4e7c-9908-65261a142070","Type":"ContainerStarted","Data":"e36121ee3ecc5e495318399fe4d0766de295f536de98af4f85cad530ebc052c4"} Oct 03 13:25:46 crc kubenswrapper[4868]: I1003 13:25:46.341613 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" event={"ID":"831c7a15-2c7a-4e7c-9908-65261a142070","Type":"ContainerStarted","Data":"fbcb06d7af4363aae538c3fd922dd64c1c65408a3704c71aa2b9870808d84270"} Oct 03 13:25:46 crc kubenswrapper[4868]: I1003 13:25:46.370961 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" podStartSLOduration=1.821827478 podStartE2EDuration="2.370913092s" podCreationTimestamp="2025-10-03 13:25:44 +0000 UTC" firstStartedPulling="2025-10-03 13:25:45.298958502 +0000 UTC m=+2141.508807568" lastFinishedPulling="2025-10-03 13:25:45.848044116 +0000 UTC m=+2142.057893182" observedRunningTime="2025-10-03 13:25:46.362202157 +0000 UTC m=+2142.572051243" watchObservedRunningTime="2025-10-03 13:25:46.370913092 +0000 UTC m=+2142.580762158" Oct 03 13:26:24 crc kubenswrapper[4868]: I1003 13:26:24.726714 4868 generic.go:334] "Generic (PLEG): container finished" podID="831c7a15-2c7a-4e7c-9908-65261a142070" containerID="fbcb06d7af4363aae538c3fd922dd64c1c65408a3704c71aa2b9870808d84270" exitCode=0 Oct 03 13:26:24 crc kubenswrapper[4868]: I1003 13:26:24.726862 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" event={"ID":"831c7a15-2c7a-4e7c-9908-65261a142070","Type":"ContainerDied","Data":"fbcb06d7af4363aae538c3fd922dd64c1c65408a3704c71aa2b9870808d84270"} Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.212986 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.378872 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.378983 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbxn9\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379303 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379420 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379472 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379542 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379650 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379691 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379769 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: 
\"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379855 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379903 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.379956 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.380033 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.380130 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle\") pod \"831c7a15-2c7a-4e7c-9908-65261a142070\" (UID: \"831c7a15-2c7a-4e7c-9908-65261a142070\") " Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.387455 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.389134 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.389184 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.389159 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.389228 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9" (OuterVolumeSpecName: "kube-api-access-rbxn9") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "kube-api-access-rbxn9". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.390010 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.390017 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.390480 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.391706 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.393164 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.393182 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.393237 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.415960 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.420381 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory" (OuterVolumeSpecName: "inventory") pod "831c7a15-2c7a-4e7c-9908-65261a142070" (UID: "831c7a15-2c7a-4e7c-9908-65261a142070"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484739 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484776 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbxn9\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-kube-api-access-rbxn9\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484790 4868 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484801 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484812 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484823 4868 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/831c7a15-2c7a-4e7c-9908-65261a142070-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484833 4868 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484844 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484856 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484867 4868 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484876 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484886 4868 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484895 4868 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.484907 4868 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/831c7a15-2c7a-4e7c-9908-65261a142070-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.747068 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" event={"ID":"831c7a15-2c7a-4e7c-9908-65261a142070","Type":"ContainerDied","Data":"e36121ee3ecc5e495318399fe4d0766de295f536de98af4f85cad530ebc052c4"} Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.747112 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e36121ee3ecc5e495318399fe4d0766de295f536de98af4f85cad530ebc052c4" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.747186 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.870324 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz"] Oct 03 13:26:26 crc kubenswrapper[4868]: E1003 13:26:26.871407 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="831c7a15-2c7a-4e7c-9908-65261a142070" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.871445 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="831c7a15-2c7a-4e7c-9908-65261a142070" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.871794 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="831c7a15-2c7a-4e7c-9908-65261a142070" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.872943 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.875632 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.875673 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.875992 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.876402 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.881276 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:26:26 crc kubenswrapper[4868]: I1003 13:26:26.882829 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz"] Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.000594 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.001583 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.001704 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.001841 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ttkx4\" (UniqueName: \"kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.001935 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.105675 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.105779 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.105830 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.105869 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ttkx4\" (UniqueName: \"kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.105905 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.106876 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.112179 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.112694 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.117093 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.125690 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ttkx4\" (UniqueName: \"kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-5tcvz\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.208144 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:26:27 crc kubenswrapper[4868]: I1003 13:26:27.825768 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz"] Oct 03 13:26:28 crc kubenswrapper[4868]: I1003 13:26:28.766250 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" event={"ID":"eda000c8-8118-48c9-ac7b-42353619ea8d","Type":"ContainerStarted","Data":"341086976f23ec5c56ce1686e0a466bc84dcecd7121e20d84f2804e66b3752d5"} Oct 03 13:26:28 crc kubenswrapper[4868]: I1003 13:26:28.766672 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" event={"ID":"eda000c8-8118-48c9-ac7b-42353619ea8d","Type":"ContainerStarted","Data":"7e21e68ce665821841e52a9f06db955360ee56ff0b10b0156f08e69371b73f65"} Oct 03 13:26:28 crc kubenswrapper[4868]: I1003 13:26:28.783830 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" podStartSLOduration=2.644107558 podStartE2EDuration="2.783805599s" podCreationTimestamp="2025-10-03 13:26:26 +0000 UTC" firstStartedPulling="2025-10-03 13:26:27.827712448 +0000 UTC m=+2184.037561514" lastFinishedPulling="2025-10-03 13:26:27.967410489 +0000 UTC m=+2184.177259555" observedRunningTime="2025-10-03 13:26:28.782552635 +0000 UTC m=+2184.992401721" watchObservedRunningTime="2025-10-03 13:26:28.783805599 +0000 UTC m=+2184.993654685" Oct 03 13:27:02 crc kubenswrapper[4868]: I1003 13:27:02.075453 4868 generic.go:334] "Generic (PLEG): container finished" podID="eda000c8-8118-48c9-ac7b-42353619ea8d" containerID="341086976f23ec5c56ce1686e0a466bc84dcecd7121e20d84f2804e66b3752d5" exitCode=2 Oct 03 13:27:02 crc kubenswrapper[4868]: I1003 13:27:02.075512 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" event={"ID":"eda000c8-8118-48c9-ac7b-42353619ea8d","Type":"ContainerDied","Data":"341086976f23ec5c56ce1686e0a466bc84dcecd7121e20d84f2804e66b3752d5"} Oct 03 13:27:02 crc kubenswrapper[4868]: I1003 13:27:02.146187 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:27:02 crc kubenswrapper[4868]: I1003 13:27:02.146288 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:27:03 crc kubenswrapper[4868]: 
I1003 13:27:03.539171 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.563637 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ttkx4\" (UniqueName: \"kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4\") pod \"eda000c8-8118-48c9-ac7b-42353619ea8d\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.563916 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key\") pod \"eda000c8-8118-48c9-ac7b-42353619ea8d\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.564181 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory\") pod \"eda000c8-8118-48c9-ac7b-42353619ea8d\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.564242 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle\") pod \"eda000c8-8118-48c9-ac7b-42353619ea8d\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.564293 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0\") pod \"eda000c8-8118-48c9-ac7b-42353619ea8d\" (UID: \"eda000c8-8118-48c9-ac7b-42353619ea8d\") " Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.572656 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4" (OuterVolumeSpecName: "kube-api-access-ttkx4") pod "eda000c8-8118-48c9-ac7b-42353619ea8d" (UID: "eda000c8-8118-48c9-ac7b-42353619ea8d"). InnerVolumeSpecName "kube-api-access-ttkx4". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.573569 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "eda000c8-8118-48c9-ac7b-42353619ea8d" (UID: "eda000c8-8118-48c9-ac7b-42353619ea8d"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.594256 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory" (OuterVolumeSpecName: "inventory") pod "eda000c8-8118-48c9-ac7b-42353619ea8d" (UID: "eda000c8-8118-48c9-ac7b-42353619ea8d"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.594580 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "eda000c8-8118-48c9-ac7b-42353619ea8d" (UID: "eda000c8-8118-48c9-ac7b-42353619ea8d"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.602657 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "eda000c8-8118-48c9-ac7b-42353619ea8d" (UID: "eda000c8-8118-48c9-ac7b-42353619ea8d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.667225 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ttkx4\" (UniqueName: \"kubernetes.io/projected/eda000c8-8118-48c9-ac7b-42353619ea8d-kube-api-access-ttkx4\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.667259 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.667270 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.667281 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eda000c8-8118-48c9-ac7b-42353619ea8d-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:03 crc kubenswrapper[4868]: I1003 13:27:03.667292 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/eda000c8-8118-48c9-ac7b-42353619ea8d-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:04 crc kubenswrapper[4868]: I1003 13:27:04.100687 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" event={"ID":"eda000c8-8118-48c9-ac7b-42353619ea8d","Type":"ContainerDied","Data":"7e21e68ce665821841e52a9f06db955360ee56ff0b10b0156f08e69371b73f65"} Oct 03 13:27:04 crc kubenswrapper[4868]: I1003 13:27:04.101242 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7e21e68ce665821841e52a9f06db955360ee56ff0b10b0156f08e69371b73f65" Oct 03 13:27:04 crc kubenswrapper[4868]: I1003 13:27:04.100777 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-5tcvz" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.086681 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:10 crc kubenswrapper[4868]: E1003 13:27:10.088363 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eda000c8-8118-48c9-ac7b-42353619ea8d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.088386 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="eda000c8-8118-48c9-ac7b-42353619ea8d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.088650 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="eda000c8-8118-48c9-ac7b-42353619ea8d" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.090845 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.099296 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.123478 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.123616 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mg8z7\" (UniqueName: \"kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.129867 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.232302 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.232390 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mg8z7\" (UniqueName: \"kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.232443 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.233265 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.236932 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.258780 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mg8z7\" (UniqueName: \"kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7\") pod \"certified-operators-8z4nr\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.422781 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:10 crc kubenswrapper[4868]: I1003 13:27:10.775426 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.034772 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh"] Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.036824 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.039192 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.039792 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.040769 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.041390 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.041400 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.049958 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mfnww\" (UniqueName: \"kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.050004 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.050115 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.050154 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.050201 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.057670 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh"] Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.151515 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mfnww\" 
(UniqueName: \"kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.151555 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.151648 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.151681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.151720 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.152826 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.159506 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.159508 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.162447 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.169285 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mfnww\" (UniqueName: \"kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-989zh\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.189297 4868 generic.go:334] "Generic (PLEG): container finished" podID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerID="fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1" exitCode=0 Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.189361 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerDied","Data":"fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1"} Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.189400 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerStarted","Data":"64e1d38bb260ec3ac59f18822b6719e8ec389c99fe38aff237561b9479628233"} Oct 03 13:27:11 crc kubenswrapper[4868]: I1003 13:27:11.368090 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.081644 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh"] Oct 03 13:27:12 crc kubenswrapper[4868]: W1003 13:27:12.085851 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f27ba9e_0599_4586_a237_6df89c605a4b.slice/crio-130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729 WatchSource:0}: Error finding container 130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729: Status 404 returned error can't find the container with id 130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729 Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.203799 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerStarted","Data":"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b"} Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.205800 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" event={"ID":"5f27ba9e-0599-4586-a237-6df89c605a4b","Type":"ContainerStarted","Data":"130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729"} Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.305693 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.309600 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.318945 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.382195 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.382266 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.382355 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85mm5\" (UniqueName: \"kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.478882 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.482834 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.484482 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85mm5\" (UniqueName: \"kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.484640 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.484681 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.485461 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.485563 4868 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.501537 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.509548 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85mm5\" (UniqueName: \"kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5\") pod \"redhat-marketplace-t4kx2\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.586069 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.586394 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.586839 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f2dhh\" (UniqueName: \"kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.596000 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.689304 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.689435 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.689493 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f2dhh\" (UniqueName: \"kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.689855 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.690258 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.710872 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f2dhh\" (UniqueName: \"kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh\") pod \"community-operators-rx4gv\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.903666 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:12 crc kubenswrapper[4868]: I1003 13:27:12.959797 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:12 crc kubenswrapper[4868]: W1003 13:27:12.993828 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fb86d4f_3626_4eae_b0ac_969a08a59213.slice/crio-93b0145cebf0fa038359bac75352ca5be799c95ef434d022e0cbff1d8b5ce252 WatchSource:0}: Error finding container 93b0145cebf0fa038359bac75352ca5be799c95ef434d022e0cbff1d8b5ce252: Status 404 returned error can't find the container with id 93b0145cebf0fa038359bac75352ca5be799c95ef434d022e0cbff1d8b5ce252 Oct 03 13:27:13 crc kubenswrapper[4868]: I1003 13:27:13.252425 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerStarted","Data":"93b0145cebf0fa038359bac75352ca5be799c95ef434d022e0cbff1d8b5ce252"} Oct 03 13:27:13 crc kubenswrapper[4868]: I1003 13:27:13.258713 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" event={"ID":"5f27ba9e-0599-4586-a237-6df89c605a4b","Type":"ContainerStarted","Data":"64615ec93f04904fbacfae3378d0a125d35f50c6abb88c3cc8909143ef6817fd"} Oct 03 13:27:13 crc kubenswrapper[4868]: I1003 13:27:13.290422 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" podStartSLOduration=2.078834105 podStartE2EDuration="2.290390636s" podCreationTimestamp="2025-10-03 13:27:11 +0000 UTC" firstStartedPulling="2025-10-03 13:27:12.09147151 +0000 UTC m=+2228.301320576" lastFinishedPulling="2025-10-03 13:27:12.303028041 +0000 UTC m=+2228.512877107" observedRunningTime="2025-10-03 13:27:13.283895121 +0000 UTC m=+2229.493744197" watchObservedRunningTime="2025-10-03 13:27:13.290390636 +0000 UTC m=+2229.500239722" Oct 03 13:27:13 crc kubenswrapper[4868]: I1003 13:27:13.381691 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:13 crc kubenswrapper[4868]: W1003 13:27:13.395356 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f749c39_314b_4c58_9021_47917a0d18f5.slice/crio-8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c WatchSource:0}: Error finding container 8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c: Status 404 returned error can't find the container with id 8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.268820 4868 generic.go:334] "Generic (PLEG): container finished" podID="9f749c39-314b-4c58-9021-47917a0d18f5" containerID="1ed4b528b1a5e6ca24aefce711c73324b5423aa042a619091414ba6a2d2f639d" exitCode=0 Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.268872 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerDied","Data":"1ed4b528b1a5e6ca24aefce711c73324b5423aa042a619091414ba6a2d2f639d"} Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.269268 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" 
event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerStarted","Data":"8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c"} Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.275357 4868 generic.go:334] "Generic (PLEG): container finished" podID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerID="5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6" exitCode=0 Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.275433 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerDied","Data":"5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6"} Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.283872 4868 generic.go:334] "Generic (PLEG): container finished" podID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerID="90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b" exitCode=0 Oct 03 13:27:14 crc kubenswrapper[4868]: I1003 13:27:14.283993 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerDied","Data":"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b"} Oct 03 13:27:17 crc kubenswrapper[4868]: I1003 13:27:17.317662 4868 generic.go:334] "Generic (PLEG): container finished" podID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerID="a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc" exitCode=0 Oct 03 13:27:17 crc kubenswrapper[4868]: I1003 13:27:17.317788 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerDied","Data":"a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc"} Oct 03 13:27:17 crc kubenswrapper[4868]: I1003 13:27:17.328607 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerStarted","Data":"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c"} Oct 03 13:27:17 crc kubenswrapper[4868]: I1003 13:27:17.331705 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerStarted","Data":"3615d9ff6ee4bb53fee19c9e875973c8316a1f252332926954028dc72cbbc5eb"} Oct 03 13:27:17 crc kubenswrapper[4868]: I1003 13:27:17.383193 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8z4nr" podStartSLOduration=1.863068832 podStartE2EDuration="7.383164167s" podCreationTimestamp="2025-10-03 13:27:10 +0000 UTC" firstStartedPulling="2025-10-03 13:27:11.19213371 +0000 UTC m=+2227.401982776" lastFinishedPulling="2025-10-03 13:27:16.712229045 +0000 UTC m=+2232.922078111" observedRunningTime="2025-10-03 13:27:17.382211831 +0000 UTC m=+2233.592060927" watchObservedRunningTime="2025-10-03 13:27:17.383164167 +0000 UTC m=+2233.593013233" Oct 03 13:27:18 crc kubenswrapper[4868]: I1003 13:27:18.359095 4868 generic.go:334] "Generic (PLEG): container finished" podID="9f749c39-314b-4c58-9021-47917a0d18f5" containerID="3615d9ff6ee4bb53fee19c9e875973c8316a1f252332926954028dc72cbbc5eb" exitCode=0 Oct 03 13:27:18 crc kubenswrapper[4868]: I1003 13:27:18.359165 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerDied","Data":"3615d9ff6ee4bb53fee19c9e875973c8316a1f252332926954028dc72cbbc5eb"} Oct 03 13:27:19 crc kubenswrapper[4868]: I1003 13:27:19.374695 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerStarted","Data":"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de"} Oct 03 13:27:19 crc kubenswrapper[4868]: I1003 13:27:19.403507 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t4kx2" podStartSLOduration=3.102780935 podStartE2EDuration="7.403472999s" podCreationTimestamp="2025-10-03 13:27:12 +0000 UTC" firstStartedPulling="2025-10-03 13:27:14.278376609 +0000 UTC m=+2230.488225675" lastFinishedPulling="2025-10-03 13:27:18.579068673 +0000 UTC m=+2234.788917739" observedRunningTime="2025-10-03 13:27:19.397857507 +0000 UTC m=+2235.607706593" watchObservedRunningTime="2025-10-03 13:27:19.403472999 +0000 UTC m=+2235.613322065" Oct 03 13:27:20 crc kubenswrapper[4868]: I1003 13:27:20.386307 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerStarted","Data":"d22fbbea678bd5fae51d92b1727c393b4c330d251ace1995d33782e16a19e691"} Oct 03 13:27:20 crc kubenswrapper[4868]: I1003 13:27:20.416499 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rx4gv" podStartSLOduration=3.444973953 podStartE2EDuration="8.416476267s" podCreationTimestamp="2025-10-03 13:27:12 +0000 UTC" firstStartedPulling="2025-10-03 13:27:14.270823245 +0000 UTC m=+2230.480672311" lastFinishedPulling="2025-10-03 13:27:19.242325558 +0000 UTC m=+2235.452174625" observedRunningTime="2025-10-03 13:27:20.407614237 +0000 UTC m=+2236.617463313" watchObservedRunningTime="2025-10-03 13:27:20.416476267 +0000 UTC m=+2236.626325333" Oct 03 13:27:20 crc kubenswrapper[4868]: I1003 13:27:20.423232 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:20 crc kubenswrapper[4868]: I1003 13:27:20.423697 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:21 crc kubenswrapper[4868]: I1003 13:27:21.529737 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-8z4nr" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="registry-server" probeResult="failure" output=< Oct 03 13:27:21 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 13:27:21 crc kubenswrapper[4868]: > Oct 03 13:27:22 crc kubenswrapper[4868]: I1003 13:27:22.596238 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:22 crc kubenswrapper[4868]: I1003 13:27:22.596317 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:22 crc kubenswrapper[4868]: I1003 13:27:22.646502 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:22 crc kubenswrapper[4868]: I1003 13:27:22.904227 4868 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:22 crc kubenswrapper[4868]: I1003 13:27:22.904280 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:23 crc kubenswrapper[4868]: I1003 13:27:23.470907 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:23 crc kubenswrapper[4868]: I1003 13:27:23.950488 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-rx4gv" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="registry-server" probeResult="failure" output=< Oct 03 13:27:23 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 13:27:23 crc kubenswrapper[4868]: > Oct 03 13:27:24 crc kubenswrapper[4868]: I1003 13:27:24.061455 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:25 crc kubenswrapper[4868]: I1003 13:27:25.436325 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t4kx2" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="registry-server" containerID="cri-o://f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de" gracePeriod=2 Oct 03 13:27:25 crc kubenswrapper[4868]: I1003 13:27:25.994128 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.117978 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities\") pod \"5fb86d4f-3626-4eae-b0ac-969a08a59213\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.118164 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-85mm5\" (UniqueName: \"kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5\") pod \"5fb86d4f-3626-4eae-b0ac-969a08a59213\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.118327 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content\") pod \"5fb86d4f-3626-4eae-b0ac-969a08a59213\" (UID: \"5fb86d4f-3626-4eae-b0ac-969a08a59213\") " Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.119236 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities" (OuterVolumeSpecName: "utilities") pod "5fb86d4f-3626-4eae-b0ac-969a08a59213" (UID: "5fb86d4f-3626-4eae-b0ac-969a08a59213"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.125323 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5" (OuterVolumeSpecName: "kube-api-access-85mm5") pod "5fb86d4f-3626-4eae-b0ac-969a08a59213" (UID: "5fb86d4f-3626-4eae-b0ac-969a08a59213"). 
InnerVolumeSpecName "kube-api-access-85mm5". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.131236 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5fb86d4f-3626-4eae-b0ac-969a08a59213" (UID: "5fb86d4f-3626-4eae-b0ac-969a08a59213"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.221739 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-85mm5\" (UniqueName: \"kubernetes.io/projected/5fb86d4f-3626-4eae-b0ac-969a08a59213-kube-api-access-85mm5\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.221779 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.221792 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5fb86d4f-3626-4eae-b0ac-969a08a59213-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.475524 4868 generic.go:334] "Generic (PLEG): container finished" podID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerID="f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de" exitCode=0 Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.475580 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerDied","Data":"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de"} Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.475589 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t4kx2" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.475613 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t4kx2" event={"ID":"5fb86d4f-3626-4eae-b0ac-969a08a59213","Type":"ContainerDied","Data":"93b0145cebf0fa038359bac75352ca5be799c95ef434d022e0cbff1d8b5ce252"} Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.475633 4868 scope.go:117] "RemoveContainer" containerID="f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.528092 4868 scope.go:117] "RemoveContainer" containerID="a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.533842 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.543937 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t4kx2"] Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.551803 4868 scope.go:117] "RemoveContainer" containerID="5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.558484 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" path="/var/lib/kubelet/pods/5fb86d4f-3626-4eae-b0ac-969a08a59213/volumes" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.602301 4868 scope.go:117] "RemoveContainer" containerID="f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de" Oct 03 13:27:26 crc kubenswrapper[4868]: E1003 13:27:26.603385 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de\": container with ID starting with f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de not found: ID does not exist" containerID="f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.603561 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de"} err="failed to get container status \"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de\": rpc error: code = NotFound desc = could not find container \"f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de\": container with ID starting with f4e57de5ccb78c85998799d8868faba2962c1c3ea08454ffd2bb7a372e5c73de not found: ID does not exist" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.603715 4868 scope.go:117] "RemoveContainer" containerID="a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc" Oct 03 13:27:26 crc kubenswrapper[4868]: E1003 13:27:26.604550 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc\": container with ID starting with a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc not found: ID does not exist" containerID="a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.604703 4868 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc"} err="failed to get container status \"a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc\": rpc error: code = NotFound desc = could not find container \"a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc\": container with ID starting with a2222339300e45a3e3ed5ce201d02d1e8bc23a6e22d6f4965e46ce1362198ffc not found: ID does not exist" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.604826 4868 scope.go:117] "RemoveContainer" containerID="5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6" Oct 03 13:27:26 crc kubenswrapper[4868]: E1003 13:27:26.605284 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6\": container with ID starting with 5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6 not found: ID does not exist" containerID="5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6" Oct 03 13:27:26 crc kubenswrapper[4868]: I1003 13:27:26.605970 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6"} err="failed to get container status \"5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6\": rpc error: code = NotFound desc = could not find container \"5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6\": container with ID starting with 5c33d22677698500b9f05e3fa2cad9122782a2f513d32bb523587ab5fd3456d6 not found: ID does not exist" Oct 03 13:27:30 crc kubenswrapper[4868]: I1003 13:27:30.476462 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:30 crc kubenswrapper[4868]: I1003 13:27:30.528223 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:30 crc kubenswrapper[4868]: I1003 13:27:30.723967 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:31 crc kubenswrapper[4868]: I1003 13:27:31.530512 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-8z4nr" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="registry-server" containerID="cri-o://1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c" gracePeriod=2 Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.035184 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.145646 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.145753 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.161652 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content\") pod \"6fbce676-0e69-4a41-8783-60f94a206e6a\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.161728 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg8z7\" (UniqueName: \"kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7\") pod \"6fbce676-0e69-4a41-8783-60f94a206e6a\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.161778 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities\") pod \"6fbce676-0e69-4a41-8783-60f94a206e6a\" (UID: \"6fbce676-0e69-4a41-8783-60f94a206e6a\") " Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.162750 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities" (OuterVolumeSpecName: "utilities") pod "6fbce676-0e69-4a41-8783-60f94a206e6a" (UID: "6fbce676-0e69-4a41-8783-60f94a206e6a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.163153 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.170986 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7" (OuterVolumeSpecName: "kube-api-access-mg8z7") pod "6fbce676-0e69-4a41-8783-60f94a206e6a" (UID: "6fbce676-0e69-4a41-8783-60f94a206e6a"). InnerVolumeSpecName "kube-api-access-mg8z7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.228267 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6fbce676-0e69-4a41-8783-60f94a206e6a" (UID: "6fbce676-0e69-4a41-8783-60f94a206e6a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.265672 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6fbce676-0e69-4a41-8783-60f94a206e6a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.265714 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg8z7\" (UniqueName: \"kubernetes.io/projected/6fbce676-0e69-4a41-8783-60f94a206e6a-kube-api-access-mg8z7\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.544441 4868 generic.go:334] "Generic (PLEG): container finished" podID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerID="1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c" exitCode=0 Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.544555 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8z4nr" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.556857 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerDied","Data":"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c"} Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.556909 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8z4nr" event={"ID":"6fbce676-0e69-4a41-8783-60f94a206e6a","Type":"ContainerDied","Data":"64e1d38bb260ec3ac59f18822b6719e8ec389c99fe38aff237561b9479628233"} Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.556935 4868 scope.go:117] "RemoveContainer" containerID="1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.590743 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.599737 4868 scope.go:117] "RemoveContainer" containerID="90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.603416 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-8z4nr"] Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.640523 4868 scope.go:117] "RemoveContainer" containerID="fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.687386 4868 scope.go:117] "RemoveContainer" containerID="1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c" Oct 03 13:27:32 crc kubenswrapper[4868]: E1003 13:27:32.688639 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c\": container with ID starting with 1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c not found: ID does not exist" containerID="1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.688695 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c"} err="failed to get container status 
\"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c\": rpc error: code = NotFound desc = could not find container \"1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c\": container with ID starting with 1c9b2f9d3a1ebcdeaa37a6a5e0075482447341f7f255fcfc1fe7e127e3e8523c not found: ID does not exist" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.688725 4868 scope.go:117] "RemoveContainer" containerID="90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b" Oct 03 13:27:32 crc kubenswrapper[4868]: E1003 13:27:32.691765 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b\": container with ID starting with 90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b not found: ID does not exist" containerID="90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.691833 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b"} err="failed to get container status \"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b\": rpc error: code = NotFound desc = could not find container \"90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b\": container with ID starting with 90475a4b9622b13429ee5cf937408b493410b0f0d702888115bab7ccf1b2a87b not found: ID does not exist" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.691881 4868 scope.go:117] "RemoveContainer" containerID="fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1" Oct 03 13:27:32 crc kubenswrapper[4868]: E1003 13:27:32.692601 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1\": container with ID starting with fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1 not found: ID does not exist" containerID="fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.692682 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1"} err="failed to get container status \"fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1\": rpc error: code = NotFound desc = could not find container \"fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1\": container with ID starting with fc726ea1018da091f76f4ace0a94aca55afcb9bba9720e630a52258adfebe8f1 not found: ID does not exist" Oct 03 13:27:32 crc kubenswrapper[4868]: I1003 13:27:32.968038 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:33 crc kubenswrapper[4868]: I1003 13:27:33.030261 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:34 crc kubenswrapper[4868]: I1003 13:27:34.556865 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" path="/var/lib/kubelet/pods/6fbce676-0e69-4a41-8783-60f94a206e6a/volumes" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.123445 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.124451 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rx4gv" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="registry-server" containerID="cri-o://d22fbbea678bd5fae51d92b1727c393b4c330d251ace1995d33782e16a19e691" gracePeriod=2 Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.582578 4868 generic.go:334] "Generic (PLEG): container finished" podID="9f749c39-314b-4c58-9021-47917a0d18f5" containerID="d22fbbea678bd5fae51d92b1727c393b4c330d251ace1995d33782e16a19e691" exitCode=0 Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.582643 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerDied","Data":"d22fbbea678bd5fae51d92b1727c393b4c330d251ace1995d33782e16a19e691"} Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.582703 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rx4gv" event={"ID":"9f749c39-314b-4c58-9021-47917a0d18f5","Type":"ContainerDied","Data":"8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c"} Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.582722 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8181bdca61def63505870c6d7473965e6308520c6408e1c78658085363dd8b5c" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.622760 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.750010 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f2dhh\" (UniqueName: \"kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh\") pod \"9f749c39-314b-4c58-9021-47917a0d18f5\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.750383 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities\") pod \"9f749c39-314b-4c58-9021-47917a0d18f5\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.750624 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content\") pod \"9f749c39-314b-4c58-9021-47917a0d18f5\" (UID: \"9f749c39-314b-4c58-9021-47917a0d18f5\") " Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.753807 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities" (OuterVolumeSpecName: "utilities") pod "9f749c39-314b-4c58-9021-47917a0d18f5" (UID: "9f749c39-314b-4c58-9021-47917a0d18f5"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.768358 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh" (OuterVolumeSpecName: "kube-api-access-f2dhh") pod "9f749c39-314b-4c58-9021-47917a0d18f5" (UID: "9f749c39-314b-4c58-9021-47917a0d18f5"). InnerVolumeSpecName "kube-api-access-f2dhh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.852333 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f2dhh\" (UniqueName: \"kubernetes.io/projected/9f749c39-314b-4c58-9021-47917a0d18f5-kube-api-access-f2dhh\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.852364 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.874089 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9f749c39-314b-4c58-9021-47917a0d18f5" (UID: "9f749c39-314b-4c58-9021-47917a0d18f5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:27:35 crc kubenswrapper[4868]: I1003 13:27:35.953756 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f749c39-314b-4c58-9021-47917a0d18f5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:27:36 crc kubenswrapper[4868]: I1003 13:27:36.592110 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rx4gv" Oct 03 13:27:36 crc kubenswrapper[4868]: I1003 13:27:36.620641 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:36 crc kubenswrapper[4868]: I1003 13:27:36.628608 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rx4gv"] Oct 03 13:27:38 crc kubenswrapper[4868]: I1003 13:27:38.557894 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" path="/var/lib/kubelet/pods/9f749c39-314b-4c58-9021-47917a0d18f5/volumes" Oct 03 13:27:46 crc kubenswrapper[4868]: I1003 13:27:46.702223 4868 generic.go:334] "Generic (PLEG): container finished" podID="5f27ba9e-0599-4586-a237-6df89c605a4b" containerID="64615ec93f04904fbacfae3378d0a125d35f50c6abb88c3cc8909143ef6817fd" exitCode=2 Oct 03 13:27:46 crc kubenswrapper[4868]: I1003 13:27:46.702286 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" event={"ID":"5f27ba9e-0599-4586-a237-6df89c605a4b","Type":"ContainerDied","Data":"64615ec93f04904fbacfae3378d0a125d35f50c6abb88c3cc8909143ef6817fd"} Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.160763 4868 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.325953 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0\") pod \"5f27ba9e-0599-4586-a237-6df89c605a4b\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") "
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.326113 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mfnww\" (UniqueName: \"kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww\") pod \"5f27ba9e-0599-4586-a237-6df89c605a4b\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") "
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.326179 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle\") pod \"5f27ba9e-0599-4586-a237-6df89c605a4b\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") "
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.326238 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory\") pod \"5f27ba9e-0599-4586-a237-6df89c605a4b\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") "
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.326391 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key\") pod \"5f27ba9e-0599-4586-a237-6df89c605a4b\" (UID: \"5f27ba9e-0599-4586-a237-6df89c605a4b\") "
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.331968 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "5f27ba9e-0599-4586-a237-6df89c605a4b" (UID: "5f27ba9e-0599-4586-a237-6df89c605a4b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.333913 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww" (OuterVolumeSpecName: "kube-api-access-mfnww") pod "5f27ba9e-0599-4586-a237-6df89c605a4b" (UID: "5f27ba9e-0599-4586-a237-6df89c605a4b"). InnerVolumeSpecName "kube-api-access-mfnww". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.355853 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "5f27ba9e-0599-4586-a237-6df89c605a4b" (UID: "5f27ba9e-0599-4586-a237-6df89c605a4b"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.357030 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "5f27ba9e-0599-4586-a237-6df89c605a4b" (UID: "5f27ba9e-0599-4586-a237-6df89c605a4b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.357454 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory" (OuterVolumeSpecName: "inventory") pod "5f27ba9e-0599-4586-a237-6df89c605a4b" (UID: "5f27ba9e-0599-4586-a237-6df89c605a4b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.428861 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mfnww\" (UniqueName: \"kubernetes.io/projected/5f27ba9e-0599-4586-a237-6df89c605a4b-kube-api-access-mfnww\") on node \"crc\" DevicePath \"\""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.428906 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.428920 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.428931 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/5f27ba9e-0599-4586-a237-6df89c605a4b-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.428942 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/5f27ba9e-0599-4586-a237-6df89c605a4b-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.724144 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh" event={"ID":"5f27ba9e-0599-4586-a237-6df89c605a4b","Type":"ContainerDied","Data":"130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729"}
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.724528 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="130ae19fd4cc0754a76d1dfb7abc260bfa464ee7b7072a81cb09a6badd7b4729"
Oct 03 13:27:48 crc kubenswrapper[4868]: I1003 13:27:48.724294 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-989zh"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.145506 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.146436 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.146514 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.147295 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.147405 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" gracePeriod=600
Oct 03 13:28:02 crc kubenswrapper[4868]: E1003 13:28:02.269922 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.875789 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" exitCode=0
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.875862 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"}
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.876240 4868 scope.go:117] "RemoveContainer" containerID="74f4ebe53929f202105f99fe9a6e9f2a2ffdd2b6bf57fa49540987a75c9833e2"
Oct 03 13:28:02 crc kubenswrapper[4868]: I1003 13:28:02.877835 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"
Oct 03 13:28:02 crc kubenswrapper[4868]: E1003 13:28:02.878348 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.034404 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn"]
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035285 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035352 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035455 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035469 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035499 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="extract-content"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035505 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="extract-content"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035526 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035533 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035542 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035548 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035578 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035584 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="registry-server"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035604 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035610 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="extract-utilities"
Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035637 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f27ba9e-0599-4586-a237-6df89c605a4b" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035644 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f27ba9e-0599-4586-a237-6df89c605a4b" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
assignment" podUID="5f27ba9e-0599-4586-a237-6df89c605a4b" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035667 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="extract-content" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035674 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="extract-content" Oct 03 13:28:06 crc kubenswrapper[4868]: E1003 13:28:06.035686 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="extract-content" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.035692 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="extract-content" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.036286 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fbce676-0e69-4a41-8783-60f94a206e6a" containerName="registry-server" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.036310 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f27ba9e-0599-4586-a237-6df89c605a4b" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.036325 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f749c39-314b-4c58-9021-47917a0d18f5" containerName="registry-server" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.036339 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fb86d4f-3626-4eae-b0ac-969a08a59213" containerName="registry-server" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.037346 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.040126 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.040552 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.041073 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.041131 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.041698 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.055261 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn"] Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.134300 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.134383 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.134678 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.135160 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.135606 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5q7lz\" (UniqueName: \"kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.237492 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.237599 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5q7lz\" (UniqueName: \"kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.237679 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.237702 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.237741 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.239087 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.246167 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.246554 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.261004 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.263888 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5q7lz\" (UniqueName: \"kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-h8tkn\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:06 crc kubenswrapper[4868]: I1003 13:28:06.368472 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" Oct 03 13:28:07 crc kubenswrapper[4868]: I1003 13:28:07.036948 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn"] Oct 03 13:28:07 crc kubenswrapper[4868]: I1003 13:28:07.940089 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" event={"ID":"e95bbe25-a27c-466d-8d6b-bf2d745a6429","Type":"ContainerStarted","Data":"58cafdc7f61beaebab77e2a87cd1317e659b8261057454687f4a14b8125fff36"} Oct 03 13:28:07 crc kubenswrapper[4868]: I1003 13:28:07.940158 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" event={"ID":"e95bbe25-a27c-466d-8d6b-bf2d745a6429","Type":"ContainerStarted","Data":"bfbf2cce46349c7ffcffac82a1bf7d665da831f094ed8a59280bd91a3a6f7211"} Oct 03 13:28:07 crc kubenswrapper[4868]: I1003 13:28:07.967849 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" podStartSLOduration=1.779828046 podStartE2EDuration="1.967822181s" podCreationTimestamp="2025-10-03 13:28:06 +0000 UTC" firstStartedPulling="2025-10-03 13:28:07.058973186 +0000 UTC m=+2283.268822252" lastFinishedPulling="2025-10-03 13:28:07.246967321 +0000 UTC m=+2283.456816387" observedRunningTime="2025-10-03 13:28:07.964693717 +0000 UTC m=+2284.174542783" watchObservedRunningTime="2025-10-03 13:28:07.967822181 +0000 UTC m=+2284.177671257" Oct 03 13:28:17 crc kubenswrapper[4868]: I1003 13:28:17.544976 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:28:17 crc kubenswrapper[4868]: E1003 13:28:17.546027 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:28:30 crc kubenswrapper[4868]: I1003 13:28:30.544749 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:28:30 crc kubenswrapper[4868]: E1003 13:28:30.545764 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:28:39 crc 
Oct 03 13:28:39 crc kubenswrapper[4868]: I1003 13:28:39.269374 4868 generic.go:334] "Generic (PLEG): container finished" podID="e95bbe25-a27c-466d-8d6b-bf2d745a6429" containerID="58cafdc7f61beaebab77e2a87cd1317e659b8261057454687f4a14b8125fff36" exitCode=2
Oct 03 13:28:39 crc kubenswrapper[4868]: I1003 13:28:39.269474 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" event={"ID":"e95bbe25-a27c-466d-8d6b-bf2d745a6429","Type":"ContainerDied","Data":"58cafdc7f61beaebab77e2a87cd1317e659b8261057454687f4a14b8125fff36"}
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.761090 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn"
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.934073 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory\") pod \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") "
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.934259 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0\") pod \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") "
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.934652 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5q7lz\" (UniqueName: \"kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz\") pod \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") "
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.934696 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key\") pod \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") "
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.934804 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle\") pod \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\" (UID: \"e95bbe25-a27c-466d-8d6b-bf2d745a6429\") "
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.951373 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "e95bbe25-a27c-466d-8d6b-bf2d745a6429" (UID: "e95bbe25-a27c-466d-8d6b-bf2d745a6429"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.951419 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz" (OuterVolumeSpecName: "kube-api-access-5q7lz") pod "e95bbe25-a27c-466d-8d6b-bf2d745a6429" (UID: "e95bbe25-a27c-466d-8d6b-bf2d745a6429"). InnerVolumeSpecName "kube-api-access-5q7lz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.966631 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory" (OuterVolumeSpecName: "inventory") pod "e95bbe25-a27c-466d-8d6b-bf2d745a6429" (UID: "e95bbe25-a27c-466d-8d6b-bf2d745a6429"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.967344 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "e95bbe25-a27c-466d-8d6b-bf2d745a6429" (UID: "e95bbe25-a27c-466d-8d6b-bf2d745a6429"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:28:40 crc kubenswrapper[4868]: I1003 13:28:40.984600 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e95bbe25-a27c-466d-8d6b-bf2d745a6429" (UID: "e95bbe25-a27c-466d-8d6b-bf2d745a6429"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.038736 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5q7lz\" (UniqueName: \"kubernetes.io/projected/e95bbe25-a27c-466d-8d6b-bf2d745a6429-kube-api-access-5q7lz\") on node \"crc\" DevicePath \"\""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.038775 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ssh-key\") on node \"crc\" DevicePath \"\""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.038788 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.038797 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e95bbe25-a27c-466d-8d6b-bf2d745a6429-inventory\") on node \"crc\" DevicePath \"\""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.038806 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/e95bbe25-a27c-466d-8d6b-bf2d745a6429-ovncontroller-config-0\") on node \"crc\" DevicePath \"\""
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.292895 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn" event={"ID":"e95bbe25-a27c-466d-8d6b-bf2d745a6429","Type":"ContainerDied","Data":"bfbf2cce46349c7ffcffac82a1bf7d665da831f094ed8a59280bd91a3a6f7211"}
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.292950 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bfbf2cce46349c7ffcffac82a1bf7d665da831f094ed8a59280bd91a3a6f7211"
Oct 03 13:28:41 crc kubenswrapper[4868]: I1003 13:28:41.293027 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-h8tkn"
Oct 03 13:28:45 crc kubenswrapper[4868]: I1003 13:28:45.544700 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"
Oct 03 13:28:45 crc kubenswrapper[4868]: E1003 13:28:45.545834 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:28:56 crc kubenswrapper[4868]: I1003 13:28:56.544023 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"
Oct 03 13:28:56 crc kubenswrapper[4868]: E1003 13:28:56.544891 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:29:08 crc kubenswrapper[4868]: I1003 13:29:08.544491 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11"
Oct 03 13:29:08 crc kubenswrapper[4868]: E1003 13:29:08.545538 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.037628 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"]
Oct 03 13:29:18 crc kubenswrapper[4868]: E1003 13:29:18.038869 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95bbe25-a27c-466d-8d6b-bf2d745a6429" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.038883 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95bbe25-a27c-466d-8d6b-bf2d745a6429" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.039115 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="e95bbe25-a27c-466d-8d6b-bf2d745a6429" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.039825 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.046977 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.048881 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.049206 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.049380 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.051633 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.060323 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"]
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.193762 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.194231 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kcqkj\" (UniqueName: \"kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.194287 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.194392 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.194517 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.298267 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"
\"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.298634 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kcqkj\" (UniqueName: \"kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.298664 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.298713 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.298744 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.301604 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.306975 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.306981 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.308888 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") 
" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.317355 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kcqkj\" (UniqueName: \"kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-26cn4\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.370544 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:18 crc kubenswrapper[4868]: I1003 13:29:18.912983 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4"] Oct 03 13:29:19 crc kubenswrapper[4868]: I1003 13:29:19.679697 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" event={"ID":"466cdf6d-842e-4f9a-a1de-7f7471ec3c14","Type":"ContainerStarted","Data":"f7407c575081d75eaee6ff9f36880cd97a1b40bf27a31c228ccd7bd7ed170d40"} Oct 03 13:29:19 crc kubenswrapper[4868]: I1003 13:29:19.680755 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" event={"ID":"466cdf6d-842e-4f9a-a1de-7f7471ec3c14","Type":"ContainerStarted","Data":"94e04416609e14ea1210e72cb1e03922d02682c209815db4271e11812698a6f2"} Oct 03 13:29:23 crc kubenswrapper[4868]: I1003 13:29:23.544939 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:29:23 crc kubenswrapper[4868]: E1003 13:29:23.545942 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:29:37 crc kubenswrapper[4868]: I1003 13:29:37.545678 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:29:37 crc kubenswrapper[4868]: E1003 13:29:37.546729 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:29:50 crc kubenswrapper[4868]: I1003 13:29:50.016285 4868 generic.go:334] "Generic (PLEG): container finished" podID="466cdf6d-842e-4f9a-a1de-7f7471ec3c14" containerID="f7407c575081d75eaee6ff9f36880cd97a1b40bf27a31c228ccd7bd7ed170d40" exitCode=2 Oct 03 13:29:50 crc kubenswrapper[4868]: I1003 13:29:50.016556 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" event={"ID":"466cdf6d-842e-4f9a-a1de-7f7471ec3c14","Type":"ContainerDied","Data":"f7407c575081d75eaee6ff9f36880cd97a1b40bf27a31c228ccd7bd7ed170d40"} Oct 03 13:29:50 crc kubenswrapper[4868]: I1003 13:29:50.545620 4868 scope.go:117] 
"RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:29:50 crc kubenswrapper[4868]: E1003 13:29:50.545966 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.536828 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.680779 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory\") pod \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.681187 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0\") pod \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.681308 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kcqkj\" (UniqueName: \"kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj\") pod \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.681345 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle\") pod \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.681555 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") pod \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\" (UID: \"466cdf6d-842e-4f9a-a1de-7f7471ec3c14\") " Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.690352 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj" (OuterVolumeSpecName: "kube-api-access-kcqkj") pod "466cdf6d-842e-4f9a-a1de-7f7471ec3c14" (UID: "466cdf6d-842e-4f9a-a1de-7f7471ec3c14"). InnerVolumeSpecName "kube-api-access-kcqkj". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.691494 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "466cdf6d-842e-4f9a-a1de-7f7471ec3c14" (UID: "466cdf6d-842e-4f9a-a1de-7f7471ec3c14"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.709666 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "466cdf6d-842e-4f9a-a1de-7f7471ec3c14" (UID: "466cdf6d-842e-4f9a-a1de-7f7471ec3c14"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.720126 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory" (OuterVolumeSpecName: "inventory") pod "466cdf6d-842e-4f9a-a1de-7f7471ec3c14" (UID: "466cdf6d-842e-4f9a-a1de-7f7471ec3c14"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.723168 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "466cdf6d-842e-4f9a-a1de-7f7471ec3c14" (UID: "466cdf6d-842e-4f9a-a1de-7f7471ec3c14"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.787389 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.787725 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.787829 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kcqkj\" (UniqueName: \"kubernetes.io/projected/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-kube-api-access-kcqkj\") on node \"crc\" DevicePath \"\"" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.787918 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:29:51 crc kubenswrapper[4868]: I1003 13:29:51.787998 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/466cdf6d-842e-4f9a-a1de-7f7471ec3c14-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:29:52 crc kubenswrapper[4868]: I1003 13:29:52.040098 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-26cn4" event={"ID":"466cdf6d-842e-4f9a-a1de-7f7471ec3c14","Type":"ContainerDied","Data":"94e04416609e14ea1210e72cb1e03922d02682c209815db4271e11812698a6f2"} Oct 03 13:29:52 crc kubenswrapper[4868]: I1003 13:29:52.040514 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="94e04416609e14ea1210e72cb1e03922d02682c209815db4271e11812698a6f2" Oct 03 13:29:52 crc kubenswrapper[4868]: I1003 13:29:52.040191 4868 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.173815 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"]
Oct 03 13:30:00 crc kubenswrapper[4868]: E1003 13:30:00.175803 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="466cdf6d-842e-4f9a-a1de-7f7471ec3c14" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.175835 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="466cdf6d-842e-4f9a-a1de-7f7471ec3c14" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.176148 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="466cdf6d-842e-4f9a-a1de-7f7471ec3c14" containerName="ovn-edpm-deployment-openstack-edpm-ipam"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.177300 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.180179 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.181566 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.185533 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"]
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.312855 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.313791 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.313925 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2s6q\" (UniqueName: \"kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.416828 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.417175 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.417225 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2s6q\" (UniqueName: \"kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.418472 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.432485 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.439239 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2s6q\" (UniqueName: \"kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q\") pod \"collect-profiles-29324970-t8shx\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Oct 03 13:30:00 crc kubenswrapper[4868]: I1003 13:30:00.507633 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" Oct 03 13:30:01 crc kubenswrapper[4868]: I1003 13:30:01.024636 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"] Oct 03 13:30:01 crc kubenswrapper[4868]: I1003 13:30:01.153786 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" event={"ID":"a7f7e978-056c-48f1-bc05-c0368695ee2b","Type":"ContainerStarted","Data":"d611871c105309380edb72ffffb6e0a8dac6f7ee7e10fa5d2788f22489218ac0"} Oct 03 13:30:02 crc kubenswrapper[4868]: I1003 13:30:02.168167 4868 generic.go:334] "Generic (PLEG): container finished" podID="a7f7e978-056c-48f1-bc05-c0368695ee2b" containerID="50d1e5946fffb816e5f8cb4eeee9e7ff2cf7d5713c064c969fcfc24a931a3f0d" exitCode=0 Oct 03 13:30:02 crc kubenswrapper[4868]: I1003 13:30:02.168233 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" event={"ID":"a7f7e978-056c-48f1-bc05-c0368695ee2b","Type":"ContainerDied","Data":"50d1e5946fffb816e5f8cb4eeee9e7ff2cf7d5713c064c969fcfc24a931a3f0d"} Oct 03 13:30:02 crc kubenswrapper[4868]: I1003 13:30:02.545545 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:30:02 crc kubenswrapper[4868]: E1003 13:30:02.546078 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.554803 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.690921 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume\") pod \"a7f7e978-056c-48f1-bc05-c0368695ee2b\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.691190 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2s6q\" (UniqueName: \"kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q\") pod \"a7f7e978-056c-48f1-bc05-c0368695ee2b\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.691232 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume\") pod \"a7f7e978-056c-48f1-bc05-c0368695ee2b\" (UID: \"a7f7e978-056c-48f1-bc05-c0368695ee2b\") " Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.692834 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume" (OuterVolumeSpecName: "config-volume") pod "a7f7e978-056c-48f1-bc05-c0368695ee2b" (UID: "a7f7e978-056c-48f1-bc05-c0368695ee2b"). 
InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.700153 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "a7f7e978-056c-48f1-bc05-c0368695ee2b" (UID: "a7f7e978-056c-48f1-bc05-c0368695ee2b"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.700278 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q" (OuterVolumeSpecName: "kube-api-access-p2s6q") pod "a7f7e978-056c-48f1-bc05-c0368695ee2b" (UID: "a7f7e978-056c-48f1-bc05-c0368695ee2b"). InnerVolumeSpecName "kube-api-access-p2s6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.794654 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/a7f7e978-056c-48f1-bc05-c0368695ee2b-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.794721 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2s6q\" (UniqueName: \"kubernetes.io/projected/a7f7e978-056c-48f1-bc05-c0368695ee2b-kube-api-access-p2s6q\") on node \"crc\" DevicePath \"\"" Oct 03 13:30:03 crc kubenswrapper[4868]: I1003 13:30:03.794736 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/a7f7e978-056c-48f1-bc05-c0368695ee2b-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 13:30:04 crc kubenswrapper[4868]: I1003 13:30:04.192158 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" event={"ID":"a7f7e978-056c-48f1-bc05-c0368695ee2b","Type":"ContainerDied","Data":"d611871c105309380edb72ffffb6e0a8dac6f7ee7e10fa5d2788f22489218ac0"} Oct 03 13:30:04 crc kubenswrapper[4868]: I1003 13:30:04.192215 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d611871c105309380edb72ffffb6e0a8dac6f7ee7e10fa5d2788f22489218ac0" Oct 03 13:30:04 crc kubenswrapper[4868]: I1003 13:30:04.192257 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx" Oct 03 13:30:04 crc kubenswrapper[4868]: I1003 13:30:04.651798 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"] Oct 03 13:30:04 crc kubenswrapper[4868]: I1003 13:30:04.665640 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324925-9r58b"] Oct 03 13:30:06 crc kubenswrapper[4868]: I1003 13:30:06.558939 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3" path="/var/lib/kubelet/pods/b00c1fa5-e27f-48f3-b2ba-c330a1d1dba3/volumes" Oct 03 13:30:13 crc kubenswrapper[4868]: I1003 13:30:13.545067 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:30:13 crc kubenswrapper[4868]: E1003 13:30:13.546160 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:30:15 crc kubenswrapper[4868]: I1003 13:30:15.769873 4868 scope.go:117] "RemoveContainer" containerID="93baee7adc8e9db80dcd0657442ef12c49310b2a3b0810303c9b4b963dd2c958" Oct 03 13:30:24 crc kubenswrapper[4868]: I1003 13:30:24.551098 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:30:24 crc kubenswrapper[4868]: E1003 13:30:24.552438 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:30:39 crc kubenswrapper[4868]: I1003 13:30:39.545100 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:30:39 crc kubenswrapper[4868]: E1003 13:30:39.546322 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:30:54 crc kubenswrapper[4868]: I1003 13:30:54.550501 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:30:54 crc kubenswrapper[4868]: E1003 13:30:54.551466 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.033533 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x"] Oct 03 13:31:09 crc kubenswrapper[4868]: E1003 13:31:09.034722 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7f7e978-056c-48f1-bc05-c0368695ee2b" containerName="collect-profiles" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.034784 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7f7e978-056c-48f1-bc05-c0368695ee2b" containerName="collect-profiles" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.035477 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7f7e978-056c-48f1-bc05-c0368695ee2b" containerName="collect-profiles" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.036344 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.039108 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.040022 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.040336 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.040463 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.040816 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.045861 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x"] Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.205882 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvt7h\" (UniqueName: \"kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.205986 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.206036 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.206094 4868 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.206123 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.307886 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvt7h\" (UniqueName: \"kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.307995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.308025 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.308080 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.308110 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.309230 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.314197 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" 
(UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.320165 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.320213 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.325938 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvt7h\" (UniqueName: \"kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-r6j6x\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.364074 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.554361 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:31:09 crc kubenswrapper[4868]: E1003 13:31:09.557653 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.903318 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x"] Oct 03 13:31:09 crc kubenswrapper[4868]: W1003 13:31:09.907591 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb9f77be8_e1a6_4f09_9484_28f3a7fb60c9.slice/crio-5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe WatchSource:0}: Error finding container 5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe: Status 404 returned error can't find the container with id 5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe Oct 03 13:31:09 crc kubenswrapper[4868]: I1003 13:31:09.910371 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:31:10 crc kubenswrapper[4868]: I1003 13:31:10.925737 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" 
event={"ID":"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9","Type":"ContainerStarted","Data":"e022057dc1ec6df9fe568be286e2d7b294b77f3f3b99864d161eb03c27c82932"} Oct 03 13:31:10 crc kubenswrapper[4868]: I1003 13:31:10.926134 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" event={"ID":"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9","Type":"ContainerStarted","Data":"5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe"} Oct 03 13:31:10 crc kubenswrapper[4868]: I1003 13:31:10.951151 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" podStartSLOduration=1.7380491139999998 podStartE2EDuration="1.951128109s" podCreationTimestamp="2025-10-03 13:31:09 +0000 UTC" firstStartedPulling="2025-10-03 13:31:09.910076579 +0000 UTC m=+2466.119925645" lastFinishedPulling="2025-10-03 13:31:10.123155574 +0000 UTC m=+2466.333004640" observedRunningTime="2025-10-03 13:31:10.949340682 +0000 UTC m=+2467.159189748" watchObservedRunningTime="2025-10-03 13:31:10.951128109 +0000 UTC m=+2467.160977175" Oct 03 13:31:24 crc kubenswrapper[4868]: I1003 13:31:24.552369 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:31:24 crc kubenswrapper[4868]: E1003 13:31:24.554746 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:31:38 crc kubenswrapper[4868]: I1003 13:31:38.544930 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:31:38 crc kubenswrapper[4868]: E1003 13:31:38.546174 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:31:42 crc kubenswrapper[4868]: I1003 13:31:42.241207 4868 generic.go:334] "Generic (PLEG): container finished" podID="b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" containerID="e022057dc1ec6df9fe568be286e2d7b294b77f3f3b99864d161eb03c27c82932" exitCode=2 Oct 03 13:31:42 crc kubenswrapper[4868]: I1003 13:31:42.241303 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" event={"ID":"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9","Type":"ContainerDied","Data":"e022057dc1ec6df9fe568be286e2d7b294b77f3f3b99864d161eb03c27c82932"} Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.697353 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.755026 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0\") pod \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.755166 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory\") pod \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.755257 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle\") pod \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.755292 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvt7h\" (UniqueName: \"kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h\") pod \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.755490 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key\") pod \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\" (UID: \"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9\") " Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.764581 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h" (OuterVolumeSpecName: "kube-api-access-rvt7h") pod "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" (UID: "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9"). InnerVolumeSpecName "kube-api-access-rvt7h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.767436 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" (UID: "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.794386 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory" (OuterVolumeSpecName: "inventory") pod "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" (UID: "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.794680 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" (UID: "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.795931 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" (UID: "b9f77be8-e1a6-4f09-9484-28f3a7fb60c9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.859121 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.859344 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvt7h\" (UniqueName: \"kubernetes.io/projected/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-kube-api-access-rvt7h\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.859482 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.859570 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:43 crc kubenswrapper[4868]: I1003 13:31:43.859663 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9f77be8-e1a6-4f09-9484-28f3a7fb60c9-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:31:44 crc kubenswrapper[4868]: I1003 13:31:44.265185 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" event={"ID":"b9f77be8-e1a6-4f09-9484-28f3a7fb60c9","Type":"ContainerDied","Data":"5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe"} Oct 03 13:31:44 crc kubenswrapper[4868]: I1003 13:31:44.265255 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e5bdb4f4f33d497871b9caa5d693261647a6f278ae05448ca00480e49dbf7fe" Oct 03 13:31:44 crc kubenswrapper[4868]: I1003 13:31:44.265280 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-r6j6x" Oct 03 13:31:52 crc kubenswrapper[4868]: I1003 13:31:52.547267 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:31:52 crc kubenswrapper[4868]: E1003 13:31:52.548377 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:32:05 crc kubenswrapper[4868]: I1003 13:32:05.544239 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:32:05 crc kubenswrapper[4868]: E1003 13:32:05.545209 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:32:17 crc kubenswrapper[4868]: I1003 13:32:17.544797 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:32:17 crc kubenswrapper[4868]: E1003 13:32:17.545796 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:32:31 crc kubenswrapper[4868]: I1003 13:32:31.544589 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:32:31 crc kubenswrapper[4868]: E1003 13:32:31.545439 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:32:42 crc kubenswrapper[4868]: I1003 13:32:42.545012 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:32:42 crc kubenswrapper[4868]: E1003 13:32:42.545870 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:32:55 crc kubenswrapper[4868]: I1003 13:32:55.544373 4868 scope.go:117] "RemoveContainer" 
containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:32:55 crc kubenswrapper[4868]: E1003 13:32:55.545467 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:33:07 crc kubenswrapper[4868]: I1003 13:33:07.545030 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:33:08 crc kubenswrapper[4868]: I1003 13:33:08.128952 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460"} Oct 03 13:33:15 crc kubenswrapper[4868]: I1003 13:33:15.899921 4868 scope.go:117] "RemoveContainer" containerID="1ed4b528b1a5e6ca24aefce711c73324b5423aa042a619091414ba6a2d2f639d" Oct 03 13:34:15 crc kubenswrapper[4868]: I1003 13:34:15.957778 4868 scope.go:117] "RemoveContainer" containerID="3615d9ff6ee4bb53fee19c9e875973c8316a1f252332926954028dc72cbbc5eb" Oct 03 13:34:16 crc kubenswrapper[4868]: I1003 13:34:16.002401 4868 scope.go:117] "RemoveContainer" containerID="d22fbbea678bd5fae51d92b1727c393b4c330d251ace1995d33782e16a19e691" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.035144 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc"] Oct 03 13:34:21 crc kubenswrapper[4868]: E1003 13:34:21.035954 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.035969 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.036223 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9f77be8-e1a6-4f09-9484-28f3a7fb60c9" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.037030 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.041682 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.041937 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.042353 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.042518 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.042623 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.046573 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc"] Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.171564 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.171639 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.171750 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.171797 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.171835 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nsjh8\" (UniqueName: \"kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.273541 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.273610 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.273678 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.273725 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.273761 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nsjh8\" (UniqueName: \"kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.274980 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.282021 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.282556 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.283020 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.293574 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nsjh8\" (UniqueName: \"kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-ng6mc\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.362100 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:21 crc kubenswrapper[4868]: I1003 13:34:21.924905 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc"] Oct 03 13:34:22 crc kubenswrapper[4868]: I1003 13:34:22.849885 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" event={"ID":"3564d986-0715-4a9a-acf7-caebd6007fcf","Type":"ContainerStarted","Data":"e5d0ead7489bc4cbbf555d25c44b5fc564e03f3108a65a35ef44daa44beb2110"} Oct 03 13:34:22 crc kubenswrapper[4868]: I1003 13:34:22.850292 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" event={"ID":"3564d986-0715-4a9a-acf7-caebd6007fcf","Type":"ContainerStarted","Data":"9e167a2f442e99c03805d5809ce18bb12ef8c1a170b2e6c9cd26ea55c6ba0667"} Oct 03 13:34:22 crc kubenswrapper[4868]: I1003 13:34:22.876337 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" podStartSLOduration=1.351206234 podStartE2EDuration="1.876300937s" podCreationTimestamp="2025-10-03 13:34:21 +0000 UTC" firstStartedPulling="2025-10-03 13:34:21.935358341 +0000 UTC m=+2658.145207407" lastFinishedPulling="2025-10-03 13:34:22.460453044 +0000 UTC m=+2658.670302110" observedRunningTime="2025-10-03 13:34:22.867189623 +0000 UTC m=+2659.077038689" watchObservedRunningTime="2025-10-03 13:34:22.876300937 +0000 UTC m=+2659.086150003" Oct 03 13:34:53 crc kubenswrapper[4868]: I1003 13:34:53.162814 4868 generic.go:334] "Generic (PLEG): container finished" podID="3564d986-0715-4a9a-acf7-caebd6007fcf" containerID="e5d0ead7489bc4cbbf555d25c44b5fc564e03f3108a65a35ef44daa44beb2110" exitCode=2 Oct 03 13:34:53 crc kubenswrapper[4868]: I1003 13:34:53.162915 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" event={"ID":"3564d986-0715-4a9a-acf7-caebd6007fcf","Type":"ContainerDied","Data":"e5d0ead7489bc4cbbf555d25c44b5fc564e03f3108a65a35ef44daa44beb2110"} Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.574435 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.674896 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nsjh8\" (UniqueName: \"kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8\") pod \"3564d986-0715-4a9a-acf7-caebd6007fcf\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.675198 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0\") pod \"3564d986-0715-4a9a-acf7-caebd6007fcf\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.675310 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory\") pod \"3564d986-0715-4a9a-acf7-caebd6007fcf\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.675539 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle\") pod \"3564d986-0715-4a9a-acf7-caebd6007fcf\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.675703 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key\") pod \"3564d986-0715-4a9a-acf7-caebd6007fcf\" (UID: \"3564d986-0715-4a9a-acf7-caebd6007fcf\") " Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.683100 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8" (OuterVolumeSpecName: "kube-api-access-nsjh8") pod "3564d986-0715-4a9a-acf7-caebd6007fcf" (UID: "3564d986-0715-4a9a-acf7-caebd6007fcf"). InnerVolumeSpecName "kube-api-access-nsjh8". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.683198 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "3564d986-0715-4a9a-acf7-caebd6007fcf" (UID: "3564d986-0715-4a9a-acf7-caebd6007fcf"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.704302 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "3564d986-0715-4a9a-acf7-caebd6007fcf" (UID: "3564d986-0715-4a9a-acf7-caebd6007fcf"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.706819 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory" (OuterVolumeSpecName: "inventory") pod "3564d986-0715-4a9a-acf7-caebd6007fcf" (UID: "3564d986-0715-4a9a-acf7-caebd6007fcf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.713224 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3564d986-0715-4a9a-acf7-caebd6007fcf" (UID: "3564d986-0715-4a9a-acf7-caebd6007fcf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.779570 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nsjh8\" (UniqueName: \"kubernetes.io/projected/3564d986-0715-4a9a-acf7-caebd6007fcf-kube-api-access-nsjh8\") on node \"crc\" DevicePath \"\"" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.779612 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/3564d986-0715-4a9a-acf7-caebd6007fcf-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.779623 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.779636 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:34:54 crc kubenswrapper[4868]: I1003 13:34:54.779645 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3564d986-0715-4a9a-acf7-caebd6007fcf-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:34:55 crc kubenswrapper[4868]: I1003 13:34:55.183807 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" event={"ID":"3564d986-0715-4a9a-acf7-caebd6007fcf","Type":"ContainerDied","Data":"9e167a2f442e99c03805d5809ce18bb12ef8c1a170b2e6c9cd26ea55c6ba0667"} Oct 03 13:34:55 crc kubenswrapper[4868]: I1003 13:34:55.184154 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e167a2f442e99c03805d5809ce18bb12ef8c1a170b2e6c9cd26ea55c6ba0667" Oct 03 13:34:55 crc kubenswrapper[4868]: I1003 13:34:55.183871 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-ng6mc" Oct 03 13:35:32 crc kubenswrapper[4868]: I1003 13:35:32.145364 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:35:32 crc kubenswrapper[4868]: I1003 13:35:32.146045 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.542078 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:00 crc kubenswrapper[4868]: E1003 13:36:00.544190 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3564d986-0715-4a9a-acf7-caebd6007fcf" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.544285 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="3564d986-0715-4a9a-acf7-caebd6007fcf" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.544640 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="3564d986-0715-4a9a-acf7-caebd6007fcf" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.547962 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.578296 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.682510 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.683226 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.683931 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ms4vh\" (UniqueName: \"kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.785733 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ms4vh\" (UniqueName: \"kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.785823 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.785905 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.786495 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.786561 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.810978 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-ms4vh\" (UniqueName: \"kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh\") pod \"redhat-operators-9rgrc\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:00 crc kubenswrapper[4868]: I1003 13:36:00.874237 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:01 crc kubenswrapper[4868]: I1003 13:36:01.405269 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:01 crc kubenswrapper[4868]: I1003 13:36:01.864358 4868 generic.go:334] "Generic (PLEG): container finished" podID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerID="babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7" exitCode=0 Oct 03 13:36:01 crc kubenswrapper[4868]: I1003 13:36:01.864421 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerDied","Data":"babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7"} Oct 03 13:36:01 crc kubenswrapper[4868]: I1003 13:36:01.864456 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerStarted","Data":"4ecbeb564caa046f6139a139441fd3b9cb261f11fadb41b410eb26a12a88d426"} Oct 03 13:36:02 crc kubenswrapper[4868]: I1003 13:36:02.146201 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:36:02 crc kubenswrapper[4868]: I1003 13:36:02.146260 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:36:03 crc kubenswrapper[4868]: I1003 13:36:03.892919 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerStarted","Data":"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6"} Oct 03 13:36:04 crc kubenswrapper[4868]: I1003 13:36:04.904616 4868 generic.go:334] "Generic (PLEG): container finished" podID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerID="49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6" exitCode=0 Oct 03 13:36:04 crc kubenswrapper[4868]: I1003 13:36:04.904679 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerDied","Data":"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6"} Oct 03 13:36:06 crc kubenswrapper[4868]: I1003 13:36:06.928907 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerStarted","Data":"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59"} Oct 03 13:36:06 crc kubenswrapper[4868]: I1003 13:36:06.958592 4868 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9rgrc" podStartSLOduration=2.763885278 podStartE2EDuration="6.958567588s" podCreationTimestamp="2025-10-03 13:36:00 +0000 UTC" firstStartedPulling="2025-10-03 13:36:01.866460529 +0000 UTC m=+2758.076309595" lastFinishedPulling="2025-10-03 13:36:06.061142839 +0000 UTC m=+2762.270991905" observedRunningTime="2025-10-03 13:36:06.949139326 +0000 UTC m=+2763.158988412" watchObservedRunningTime="2025-10-03 13:36:06.958567588 +0000 UTC m=+2763.168416654" Oct 03 13:36:10 crc kubenswrapper[4868]: I1003 13:36:10.875208 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:10 crc kubenswrapper[4868]: I1003 13:36:10.875844 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:10 crc kubenswrapper[4868]: I1003 13:36:10.925723 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:20 crc kubenswrapper[4868]: I1003 13:36:20.931331 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:20 crc kubenswrapper[4868]: I1003 13:36:20.991967 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.059029 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9rgrc" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="registry-server" containerID="cri-o://09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59" gracePeriod=2 Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.558440 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.632665 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities\") pod \"b850772b-1d50-429b-9ab0-0ec5509735e0\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.632946 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ms4vh\" (UniqueName: \"kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh\") pod \"b850772b-1d50-429b-9ab0-0ec5509735e0\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.633105 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content\") pod \"b850772b-1d50-429b-9ab0-0ec5509735e0\" (UID: \"b850772b-1d50-429b-9ab0-0ec5509735e0\") " Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.633701 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities" (OuterVolumeSpecName: "utilities") pod "b850772b-1d50-429b-9ab0-0ec5509735e0" (UID: "b850772b-1d50-429b-9ab0-0ec5509735e0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.633851 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.640599 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh" (OuterVolumeSpecName: "kube-api-access-ms4vh") pod "b850772b-1d50-429b-9ab0-0ec5509735e0" (UID: "b850772b-1d50-429b-9ab0-0ec5509735e0"). InnerVolumeSpecName "kube-api-access-ms4vh". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.729677 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b850772b-1d50-429b-9ab0-0ec5509735e0" (UID: "b850772b-1d50-429b-9ab0-0ec5509735e0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.736504 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b850772b-1d50-429b-9ab0-0ec5509735e0-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:21 crc kubenswrapper[4868]: I1003 13:36:21.736582 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ms4vh\" (UniqueName: \"kubernetes.io/projected/b850772b-1d50-429b-9ab0-0ec5509735e0-kube-api-access-ms4vh\") on node \"crc\" DevicePath \"\"" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.073011 4868 generic.go:334] "Generic (PLEG): container finished" podID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerID="09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59" exitCode=0 Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.073095 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerDied","Data":"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59"} Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.073135 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9rgrc" event={"ID":"b850772b-1d50-429b-9ab0-0ec5509735e0","Type":"ContainerDied","Data":"4ecbeb564caa046f6139a139441fd3b9cb261f11fadb41b410eb26a12a88d426"} Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.073141 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9rgrc" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.073252 4868 scope.go:117] "RemoveContainer" containerID="09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.112189 4868 scope.go:117] "RemoveContainer" containerID="49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.112315 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.120835 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9rgrc"] Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.132116 4868 scope.go:117] "RemoveContainer" containerID="babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.178740 4868 scope.go:117] "RemoveContainer" containerID="09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59" Oct 03 13:36:22 crc kubenswrapper[4868]: E1003 13:36:22.179298 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59\": container with ID starting with 09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59 not found: ID does not exist" containerID="09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.179345 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59"} err="failed to get container status \"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59\": rpc error: code = NotFound desc = could not find container \"09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59\": container with ID starting with 09f5d50ec16e04075a1fa579f9bce7293c711d79b6d8057f8454826496e1bb59 not found: ID does not exist" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.179373 4868 scope.go:117] "RemoveContainer" containerID="49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6" Oct 03 13:36:22 crc kubenswrapper[4868]: E1003 13:36:22.179739 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6\": container with ID starting with 49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6 not found: ID does not exist" containerID="49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.179789 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6"} err="failed to get container status \"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6\": rpc error: code = NotFound desc = could not find container \"49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6\": container with ID starting with 49644c92d03d46d66667bb7da8492816a47ada072c699c1dbd61a063b7a6d5d6 not found: ID does not exist" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.179819 4868 scope.go:117] "RemoveContainer" 
containerID="babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7" Oct 03 13:36:22 crc kubenswrapper[4868]: E1003 13:36:22.180090 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7\": container with ID starting with babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7 not found: ID does not exist" containerID="babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.180121 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7"} err="failed to get container status \"babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7\": rpc error: code = NotFound desc = could not find container \"babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7\": container with ID starting with babffb2af6ca2d4357e02902f45881c2e03e290163dd7717b802c54a79cfebd7 not found: ID does not exist" Oct 03 13:36:22 crc kubenswrapper[4868]: I1003 13:36:22.559897 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" path="/var/lib/kubelet/pods/b850772b-1d50-429b-9ab0-0ec5509735e0/volumes" Oct 03 13:36:32 crc kubenswrapper[4868]: I1003 13:36:32.145598 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:36:32 crc kubenswrapper[4868]: I1003 13:36:32.146257 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:36:32 crc kubenswrapper[4868]: I1003 13:36:32.146321 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:36:32 crc kubenswrapper[4868]: I1003 13:36:32.147225 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:36:32 crc kubenswrapper[4868]: I1003 13:36:32.147289 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460" gracePeriod=600 Oct 03 13:36:33 crc kubenswrapper[4868]: I1003 13:36:33.198002 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460" exitCode=0 Oct 03 13:36:33 crc kubenswrapper[4868]: I1003 13:36:33.198117 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460"} Oct 03 13:36:33 crc kubenswrapper[4868]: I1003 13:36:33.198705 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"} Oct 03 13:36:33 crc kubenswrapper[4868]: I1003 13:36:33.198743 4868 scope.go:117] "RemoveContainer" containerID="1c262be08366f41ca53d35b10928ea7cf331551dd80fc4174f5e2f040f881e11" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.857807 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:37:57 crc kubenswrapper[4868]: E1003 13:37:57.859687 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="extract-utilities" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.859713 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="extract-utilities" Oct 03 13:37:57 crc kubenswrapper[4868]: E1003 13:37:57.859752 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="registry-server" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.859762 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="registry-server" Oct 03 13:37:57 crc kubenswrapper[4868]: E1003 13:37:57.859804 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="extract-content" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.859812 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="extract-content" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.860033 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b850772b-1d50-429b-9ab0-0ec5509735e0" containerName="registry-server" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.861752 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.871399 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.951466 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.951813 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w2c46\" (UniqueName: \"kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:57 crc kubenswrapper[4868]: I1003 13:37:57.952080 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.053470 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.053522 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w2c46\" (UniqueName: \"kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.053673 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.054155 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.054669 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.076465 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-w2c46\" (UniqueName: \"kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46\") pod \"certified-operators-5sbz9\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.184738 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:37:58 crc kubenswrapper[4868]: I1003 13:37:58.685533 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:37:59 crc kubenswrapper[4868]: I1003 13:37:59.045072 4868 generic.go:334] "Generic (PLEG): container finished" podID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerID="8822c68731594f35833b5b12a99dd90a31374adb4653f3c5d4a2a611f4866d41" exitCode=0 Oct 03 13:37:59 crc kubenswrapper[4868]: I1003 13:37:59.045155 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerDied","Data":"8822c68731594f35833b5b12a99dd90a31374adb4653f3c5d4a2a611f4866d41"} Oct 03 13:37:59 crc kubenswrapper[4868]: I1003 13:37:59.045534 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerStarted","Data":"fba4c3dabd66d784c766ddb4e11b9d8f913ba18fef9b7ee0930b6bf09993ae53"} Oct 03 13:37:59 crc kubenswrapper[4868]: I1003 13:37:59.047361 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:38:01 crc kubenswrapper[4868]: I1003 13:38:01.064475 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerStarted","Data":"387c8770e4f79884c1b43b5d5164129c745442c251e99ed3603e303d8b99c585"} Oct 03 13:38:02 crc kubenswrapper[4868]: I1003 13:38:02.078315 4868 generic.go:334] "Generic (PLEG): container finished" podID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerID="387c8770e4f79884c1b43b5d5164129c745442c251e99ed3603e303d8b99c585" exitCode=0 Oct 03 13:38:02 crc kubenswrapper[4868]: I1003 13:38:02.078396 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerDied","Data":"387c8770e4f79884c1b43b5d5164129c745442c251e99ed3603e303d8b99c585"} Oct 03 13:38:03 crc kubenswrapper[4868]: I1003 13:38:03.092653 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerStarted","Data":"c388afc49593a06ad5a83cd3d36c35614330b35482e858f7d5d322ba96760641"} Oct 03 13:38:03 crc kubenswrapper[4868]: I1003 13:38:03.122796 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-5sbz9" podStartSLOduration=2.635488037 podStartE2EDuration="6.122772785s" podCreationTimestamp="2025-10-03 13:37:57 +0000 UTC" firstStartedPulling="2025-10-03 13:37:59.047026494 +0000 UTC m=+2875.256875560" lastFinishedPulling="2025-10-03 13:38:02.534311242 +0000 UTC m=+2878.744160308" observedRunningTime="2025-10-03 13:38:03.115539521 +0000 UTC m=+2879.325388597" watchObservedRunningTime="2025-10-03 
13:38:03.122772785 +0000 UTC m=+2879.332621841" Oct 03 13:38:08 crc kubenswrapper[4868]: I1003 13:38:08.185256 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:08 crc kubenswrapper[4868]: I1003 13:38:08.185767 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:08 crc kubenswrapper[4868]: I1003 13:38:08.230246 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:09 crc kubenswrapper[4868]: I1003 13:38:09.202916 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:09 crc kubenswrapper[4868]: I1003 13:38:09.306468 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:38:11 crc kubenswrapper[4868]: I1003 13:38:11.166533 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-5sbz9" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="registry-server" containerID="cri-o://c388afc49593a06ad5a83cd3d36c35614330b35482e858f7d5d322ba96760641" gracePeriod=2 Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.188358 4868 generic.go:334] "Generic (PLEG): container finished" podID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerID="c388afc49593a06ad5a83cd3d36c35614330b35482e858f7d5d322ba96760641" exitCode=0 Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.188700 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerDied","Data":"c388afc49593a06ad5a83cd3d36c35614330b35482e858f7d5d322ba96760641"} Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.445174 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.496088 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content\") pod \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.496150 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities\") pod \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.496219 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w2c46\" (UniqueName: \"kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46\") pod \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\" (UID: \"300986b6-5a93-48fc-b8dd-1470ee5fb95e\") " Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.497237 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities" (OuterVolumeSpecName: "utilities") pod "300986b6-5a93-48fc-b8dd-1470ee5fb95e" (UID: "300986b6-5a93-48fc-b8dd-1470ee5fb95e"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.503703 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46" (OuterVolumeSpecName: "kube-api-access-w2c46") pod "300986b6-5a93-48fc-b8dd-1470ee5fb95e" (UID: "300986b6-5a93-48fc-b8dd-1470ee5fb95e"). InnerVolumeSpecName "kube-api-access-w2c46". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.546205 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "300986b6-5a93-48fc-b8dd-1470ee5fb95e" (UID: "300986b6-5a93-48fc-b8dd-1470ee5fb95e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.598535 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.598847 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/300986b6-5a93-48fc-b8dd-1470ee5fb95e-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:38:13 crc kubenswrapper[4868]: I1003 13:38:13.598937 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w2c46\" (UniqueName: \"kubernetes.io/projected/300986b6-5a93-48fc-b8dd-1470ee5fb95e-kube-api-access-w2c46\") on node \"crc\" DevicePath \"\"" Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.199708 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-5sbz9" event={"ID":"300986b6-5a93-48fc-b8dd-1470ee5fb95e","Type":"ContainerDied","Data":"fba4c3dabd66d784c766ddb4e11b9d8f913ba18fef9b7ee0930b6bf09993ae53"} Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.199756 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-5sbz9" Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.199769 4868 scope.go:117] "RemoveContainer" containerID="c388afc49593a06ad5a83cd3d36c35614330b35482e858f7d5d322ba96760641" Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.222811 4868 scope.go:117] "RemoveContainer" containerID="387c8770e4f79884c1b43b5d5164129c745442c251e99ed3603e303d8b99c585" Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.237077 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.245694 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-5sbz9"] Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.260694 4868 scope.go:117] "RemoveContainer" containerID="8822c68731594f35833b5b12a99dd90a31374adb4653f3c5d4a2a611f4866d41" Oct 03 13:38:14 crc kubenswrapper[4868]: I1003 13:38:14.557592 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" path="/var/lib/kubelet/pods/300986b6-5a93-48fc-b8dd-1470ee5fb95e/volumes" Oct 03 13:38:32 crc kubenswrapper[4868]: I1003 13:38:32.146073 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:38:32 crc kubenswrapper[4868]: I1003 13:38:32.146692 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:39:02 crc kubenswrapper[4868]: I1003 13:39:02.145119 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:39:02 crc kubenswrapper[4868]: I1003 13:39:02.145730 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.145832 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.146848 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.146929 4868 kubelet.go:2542] "SyncLoop 
(probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.148236 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.148305 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" gracePeriod=600 Oct 03 13:39:32 crc kubenswrapper[4868]: E1003 13:39:32.285392 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.993485 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" exitCode=0 Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.993902 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"} Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.993949 4868 scope.go:117] "RemoveContainer" containerID="45aa04e365ba914a39f55ba6aa6adb5acad82531ae9141a4a47b1da6336aa460" Oct 03 13:39:32 crc kubenswrapper[4868]: I1003 13:39:32.995156 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:39:32 crc kubenswrapper[4868]: E1003 13:39:32.995434 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:39:48 crc kubenswrapper[4868]: I1003 13:39:48.544669 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:39:48 crc kubenswrapper[4868]: E1003 13:39:48.545410 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 
13:39:59 crc kubenswrapper[4868]: I1003 13:39:59.545258 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:39:59 crc kubenswrapper[4868]: E1003 13:39:59.546264 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:40:11 crc kubenswrapper[4868]: I1003 13:40:11.544414 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:40:11 crc kubenswrapper[4868]: E1003 13:40:11.545353 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.051968 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j"] Oct 03 13:40:12 crc kubenswrapper[4868]: E1003 13:40:12.052802 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="registry-server" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.052821 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="registry-server" Oct 03 13:40:12 crc kubenswrapper[4868]: E1003 13:40:12.052856 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="extract-content" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.052864 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="extract-content" Oct 03 13:40:12 crc kubenswrapper[4868]: E1003 13:40:12.052882 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="extract-utilities" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.052890 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="extract-utilities" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.053158 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="300986b6-5a93-48fc-b8dd-1470ee5fb95e" containerName="registry-server" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.053999 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.056492 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-zd6vt" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.057336 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.057580 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.057858 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.063995 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.065150 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j"] Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.190064 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.190135 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27xtb\" (UniqueName: \"kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.190231 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.190251 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.190289 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.292404 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" 
(UniqueName: \"kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.292455 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.292502 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.292575 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.292615 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27xtb\" (UniqueName: \"kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.295606 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.300281 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.301670 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.302289 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.313638 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27xtb\" (UniqueName: \"kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-jvv5j\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:12 crc kubenswrapper[4868]: I1003 13:40:12.374415 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:13 crc kubenswrapper[4868]: I1003 13:40:13.034015 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j"] Oct 03 13:40:13 crc kubenswrapper[4868]: I1003 13:40:13.358493 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" event={"ID":"21a6937f-c0e2-4f48-b641-c010d75d5f52","Type":"ContainerStarted","Data":"34c96c11b623585fd951936b92d3bad6fb654894ba52ce8eee94b6412a694e07"} Oct 03 13:40:14 crc kubenswrapper[4868]: I1003 13:40:14.368838 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" event={"ID":"21a6937f-c0e2-4f48-b641-c010d75d5f52","Type":"ContainerStarted","Data":"511eea5958e582b06ea00390cb7e88f6071399e25712ee5e8d906529c39b1b1c"} Oct 03 13:40:14 crc kubenswrapper[4868]: I1003 13:40:14.392877 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" podStartSLOduration=1.794755929 podStartE2EDuration="2.392859727s" podCreationTimestamp="2025-10-03 13:40:12 +0000 UTC" firstStartedPulling="2025-10-03 13:40:13.039703388 +0000 UTC m=+3009.249552454" lastFinishedPulling="2025-10-03 13:40:13.637807186 +0000 UTC m=+3009.847656252" observedRunningTime="2025-10-03 13:40:14.38290456 +0000 UTC m=+3010.592753646" watchObservedRunningTime="2025-10-03 13:40:14.392859727 +0000 UTC m=+3010.602708783" Oct 03 13:40:22 crc kubenswrapper[4868]: I1003 13:40:22.544565 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:40:22 crc kubenswrapper[4868]: E1003 13:40:22.546455 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:40:36 crc kubenswrapper[4868]: I1003 13:40:36.544291 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:40:36 crc kubenswrapper[4868]: E1003 13:40:36.545188 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:40:44 crc 
kubenswrapper[4868]: I1003 13:40:44.662216 4868 generic.go:334] "Generic (PLEG): container finished" podID="21a6937f-c0e2-4f48-b641-c010d75d5f52" containerID="511eea5958e582b06ea00390cb7e88f6071399e25712ee5e8d906529c39b1b1c" exitCode=2 Oct 03 13:40:44 crc kubenswrapper[4868]: I1003 13:40:44.662302 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" event={"ID":"21a6937f-c0e2-4f48-b641-c010d75d5f52","Type":"ContainerDied","Data":"511eea5958e582b06ea00390cb7e88f6071399e25712ee5e8d906529c39b1b1c"} Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.076356 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.172977 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle\") pod \"21a6937f-c0e2-4f48-b641-c010d75d5f52\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.173070 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0\") pod \"21a6937f-c0e2-4f48-b641-c010d75d5f52\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.173097 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27xtb\" (UniqueName: \"kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb\") pod \"21a6937f-c0e2-4f48-b641-c010d75d5f52\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.173148 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory\") pod \"21a6937f-c0e2-4f48-b641-c010d75d5f52\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.173302 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key\") pod \"21a6937f-c0e2-4f48-b641-c010d75d5f52\" (UID: \"21a6937f-c0e2-4f48-b641-c010d75d5f52\") " Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.178826 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb" (OuterVolumeSpecName: "kube-api-access-27xtb") pod "21a6937f-c0e2-4f48-b641-c010d75d5f52" (UID: "21a6937f-c0e2-4f48-b641-c010d75d5f52"). InnerVolumeSpecName "kube-api-access-27xtb". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.178921 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "21a6937f-c0e2-4f48-b641-c010d75d5f52" (UID: "21a6937f-c0e2-4f48-b641-c010d75d5f52"). InnerVolumeSpecName "ovn-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.199389 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "21a6937f-c0e2-4f48-b641-c010d75d5f52" (UID: "21a6937f-c0e2-4f48-b641-c010d75d5f52"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.204620 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "21a6937f-c0e2-4f48-b641-c010d75d5f52" (UID: "21a6937f-c0e2-4f48-b641-c010d75d5f52"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.204991 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory" (OuterVolumeSpecName: "inventory") pod "21a6937f-c0e2-4f48-b641-c010d75d5f52" (UID: "21a6937f-c0e2-4f48-b641-c010d75d5f52"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.277249 4868 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.277283 4868 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21a6937f-c0e2-4f48-b641-c010d75d5f52-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.277298 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27xtb\" (UniqueName: \"kubernetes.io/projected/21a6937f-c0e2-4f48-b641-c010d75d5f52-kube-api-access-27xtb\") on node \"crc\" DevicePath \"\"" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.277312 4868 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-inventory\") on node \"crc\" DevicePath \"\"" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.277323 4868 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21a6937f-c0e2-4f48-b641-c010d75d5f52-ssh-key\") on node \"crc\" DevicePath \"\"" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.685128 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" event={"ID":"21a6937f-c0e2-4f48-b641-c010d75d5f52","Type":"ContainerDied","Data":"34c96c11b623585fd951936b92d3bad6fb654894ba52ce8eee94b6412a694e07"} Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.685181 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34c96c11b623585fd951936b92d3bad6fb654894ba52ce8eee94b6412a694e07" Oct 03 13:40:46 crc kubenswrapper[4868]: I1003 13:40:46.685277 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-jvv5j" Oct 03 13:40:49 crc kubenswrapper[4868]: I1003 13:40:49.544040 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:40:49 crc kubenswrapper[4868]: E1003 13:40:49.545009 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:41:02 crc kubenswrapper[4868]: I1003 13:41:02.544919 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:41:02 crc kubenswrapper[4868]: E1003 13:41:02.545854 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.466424 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nsknp"] Oct 03 13:41:04 crc kubenswrapper[4868]: E1003 13:41:04.467235 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21a6937f-c0e2-4f48-b641-c010d75d5f52" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.467255 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="21a6937f-c0e2-4f48-b641-c010d75d5f52" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.467545 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="21a6937f-c0e2-4f48-b641-c010d75d5f52" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.469295 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.477175 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsknp"] Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.538258 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.538440 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tgldm\" (UniqueName: \"kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.538515 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.640400 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.640969 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tgldm\" (UniqueName: \"kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.641018 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.641300 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.640890 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.662109 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tgldm\" (UniqueName: \"kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm\") pod \"community-operators-nsknp\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") " pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:04 crc kubenswrapper[4868]: I1003 13:41:04.809195 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsknp" Oct 03 13:41:05 crc kubenswrapper[4868]: I1003 13:41:05.434559 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsknp"] Oct 03 13:41:05 crc kubenswrapper[4868]: I1003 13:41:05.878831 4868 generic.go:334] "Generic (PLEG): container finished" podID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerID="0a904fd22016b58ec9d3e59a02c6b9dbf80697d1260ac024ddfd53faca68324d" exitCode=0 Oct 03 13:41:05 crc kubenswrapper[4868]: I1003 13:41:05.878965 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerDied","Data":"0a904fd22016b58ec9d3e59a02c6b9dbf80697d1260ac024ddfd53faca68324d"} Oct 03 13:41:05 crc kubenswrapper[4868]: I1003 13:41:05.879018 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerStarted","Data":"da86117ef9b5dd8e0e529afd38310e4e2db8b5858601a395167e416feb7958ce"} Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.494619 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"] Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.501826 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.505838 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"] Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.611537 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.611641 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kdbf\" (UniqueName: \"kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.611747 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.713010 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.713138 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kdbf\" (UniqueName: \"kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.713201 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.713640 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.713669 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv" Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.732126 4868 operation_generator.go:637] "MountVolume.SetUp 
Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.732126 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kdbf\" (UniqueName: \"kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf\") pod \"redhat-marketplace-bqjbv\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") " pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.821683 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.907137 4868 generic.go:334] "Generic (PLEG): container finished" podID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerID="45b0a4d2d75b825227026b08945e194034f8aa70f38344d0eabec3e2acb37fa0" exitCode=0
Oct 03 13:41:07 crc kubenswrapper[4868]: I1003 13:41:07.907500 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerDied","Data":"45b0a4d2d75b825227026b08945e194034f8aa70f38344d0eabec3e2acb37fa0"}
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.279612 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"]
Oct 03 13:41:08 crc kubenswrapper[4868]: W1003 13:41:08.280866 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda591139e_7de1_417e_aecb_ccd4c2c105df.slice/crio-848b8ad2646bbc86af8f66b01779762c2e344d72183c2afb9cc700299ee55dce WatchSource:0}: Error finding container 848b8ad2646bbc86af8f66b01779762c2e344d72183c2afb9cc700299ee55dce: Status 404 returned error can't find the container with id 848b8ad2646bbc86af8f66b01779762c2e344d72183c2afb9cc700299ee55dce
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.919841 4868 generic.go:334] "Generic (PLEG): container finished" podID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerID="b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264" exitCode=0
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.919925 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerDied","Data":"b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264"}
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.920032 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerStarted","Data":"848b8ad2646bbc86af8f66b01779762c2e344d72183c2afb9cc700299ee55dce"}
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.922922 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerStarted","Data":"bbd37181288798b7f7761ef1f1d0e4bb665e13fccb948cafed344202cb088aa8"}
Oct 03 13:41:08 crc kubenswrapper[4868]: I1003 13:41:08.977230 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nsknp" podStartSLOduration=2.518205452 podStartE2EDuration="4.977207814s" podCreationTimestamp="2025-10-03 13:41:04 +0000 UTC" firstStartedPulling="2025-10-03 13:41:05.88186617 +0000 UTC m=+3062.091715236" lastFinishedPulling="2025-10-03 13:41:08.340868532 +0000 UTC m=+3064.550717598" observedRunningTime="2025-10-03 13:41:08.966681182 +0000 UTC m=+3065.176530278" watchObservedRunningTime="2025-10-03 13:41:08.977207814 +0000 UTC m=+3065.187056870"
Oct 03 13:41:09 crc kubenswrapper[4868]: I1003 13:41:09.935397 4868 generic.go:334] "Generic (PLEG): container finished" podID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerID="1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91" exitCode=0
Oct 03 13:41:09 crc kubenswrapper[4868]: I1003 13:41:09.935491 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerDied","Data":"1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91"}
Oct 03 13:41:10 crc kubenswrapper[4868]: I1003 13:41:10.957351 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerStarted","Data":"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"}
Oct 03 13:41:10 crc kubenswrapper[4868]: I1003 13:41:10.978585 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-bqjbv" podStartSLOduration=2.377593237 podStartE2EDuration="3.978563441s" podCreationTimestamp="2025-10-03 13:41:07 +0000 UTC" firstStartedPulling="2025-10-03 13:41:08.922488779 +0000 UTC m=+3065.132337845" lastFinishedPulling="2025-10-03 13:41:10.523458983 +0000 UTC m=+3066.733308049" observedRunningTime="2025-10-03 13:41:10.976973728 +0000 UTC m=+3067.186822804" watchObservedRunningTime="2025-10-03 13:41:10.978563441 +0000 UTC m=+3067.188412517"
Oct 03 13:41:14 crc kubenswrapper[4868]: I1003 13:41:14.809506 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:14 crc kubenswrapper[4868]: I1003 13:41:14.810000 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:14 crc kubenswrapper[4868]: I1003 13:41:14.853001 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:15 crc kubenswrapper[4868]: I1003 13:41:15.037996 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:15 crc kubenswrapper[4868]: I1003 13:41:15.257131 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsknp"]
Oct 03 13:41:15 crc kubenswrapper[4868]: I1003 13:41:15.543923 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:41:15 crc kubenswrapper[4868]: E1003 13:41:15.544300 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:41:17 crc kubenswrapper[4868]: I1003 13:41:17.010642 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-nsknp" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="registry-server" containerID="cri-o://bbd37181288798b7f7761ef1f1d0e4bb665e13fccb948cafed344202cb088aa8" gracePeriod=2
Oct 03 13:41:17 crc kubenswrapper[4868]: I1003 13:41:17.822221 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:17 crc kubenswrapper[4868]: I1003 13:41:17.822553 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:17 crc kubenswrapper[4868]: I1003 13:41:17.870392 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:18 crc kubenswrapper[4868]: I1003 13:41:18.022596 4868 generic.go:334] "Generic (PLEG): container finished" podID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerID="bbd37181288798b7f7761ef1f1d0e4bb665e13fccb948cafed344202cb088aa8" exitCode=0
Oct 03 13:41:18 crc kubenswrapper[4868]: I1003 13:41:18.022886 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerDied","Data":"bbd37181288798b7f7761ef1f1d0e4bb665e13fccb948cafed344202cb088aa8"}
Oct 03 13:41:18 crc kubenswrapper[4868]: I1003 13:41:18.075077 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:18 crc kubenswrapper[4868]: I1003 13:41:18.657706 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"]
Oct 03 13:41:18 crc kubenswrapper[4868]: I1003 13:41:18.929944 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.036506 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsknp"
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.036544 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsknp" event={"ID":"aee3da59-5b6a-4282-8783-e2b1260a93a8","Type":"ContainerDied","Data":"da86117ef9b5dd8e0e529afd38310e4e2db8b5858601a395167e416feb7958ce"}
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.036656 4868 scope.go:117] "RemoveContainer" containerID="bbd37181288798b7f7761ef1f1d0e4bb665e13fccb948cafed344202cb088aa8"
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.043280 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content\") pod \"aee3da59-5b6a-4282-8783-e2b1260a93a8\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") "
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.043469 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tgldm\" (UniqueName: \"kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm\") pod \"aee3da59-5b6a-4282-8783-e2b1260a93a8\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") "
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.043552 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities\") pod \"aee3da59-5b6a-4282-8783-e2b1260a93a8\" (UID: \"aee3da59-5b6a-4282-8783-e2b1260a93a8\") "
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.044947 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities" (OuterVolumeSpecName: "utilities") pod "aee3da59-5b6a-4282-8783-e2b1260a93a8" (UID: "aee3da59-5b6a-4282-8783-e2b1260a93a8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.053783 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm" (OuterVolumeSpecName: "kube-api-access-tgldm") pod "aee3da59-5b6a-4282-8783-e2b1260a93a8" (UID: "aee3da59-5b6a-4282-8783-e2b1260a93a8"). InnerVolumeSpecName "kube-api-access-tgldm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.063809 4868 scope.go:117] "RemoveContainer" containerID="45b0a4d2d75b825227026b08945e194034f8aa70f38344d0eabec3e2acb37fa0"
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.114114 4868 scope.go:117] "RemoveContainer" containerID="0a904fd22016b58ec9d3e59a02c6b9dbf80697d1260ac024ddfd53faca68324d"
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.146102 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tgldm\" (UniqueName: \"kubernetes.io/projected/aee3da59-5b6a-4282-8783-e2b1260a93a8-kube-api-access-tgldm\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.146143 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.216311 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "aee3da59-5b6a-4282-8783-e2b1260a93a8" (UID: "aee3da59-5b6a-4282-8783-e2b1260a93a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.247751 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/aee3da59-5b6a-4282-8783-e2b1260a93a8-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.371540 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-nsknp"]
Oct 03 13:41:19 crc kubenswrapper[4868]: I1003 13:41:19.379043 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-nsknp"]
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.050489 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-bqjbv" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="registry-server" containerID="cri-o://e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973" gracePeriod=2
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.527761 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.558748 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" path="/var/lib/kubelet/pods/aee3da59-5b6a-4282-8783-e2b1260a93a8/volumes"
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.575154 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content\") pod \"a591139e-7de1-417e-aecb-ccd4c2c105df\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") "
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.575494 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kdbf\" (UniqueName: \"kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf\") pod \"a591139e-7de1-417e-aecb-ccd4c2c105df\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") "
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.575717 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities\") pod \"a591139e-7de1-417e-aecb-ccd4c2c105df\" (UID: \"a591139e-7de1-417e-aecb-ccd4c2c105df\") "
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.576741 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities" (OuterVolumeSpecName: "utilities") pod "a591139e-7de1-417e-aecb-ccd4c2c105df" (UID: "a591139e-7de1-417e-aecb-ccd4c2c105df"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.589514 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a591139e-7de1-417e-aecb-ccd4c2c105df" (UID: "a591139e-7de1-417e-aecb-ccd4c2c105df"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.600863 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf" (OuterVolumeSpecName: "kube-api-access-7kdbf") pod "a591139e-7de1-417e-aecb-ccd4c2c105df" (UID: "a591139e-7de1-417e-aecb-ccd4c2c105df"). InnerVolumeSpecName "kube-api-access-7kdbf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.678458 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.678500 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kdbf\" (UniqueName: \"kubernetes.io/projected/a591139e-7de1-417e-aecb-ccd4c2c105df-kube-api-access-7kdbf\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:20 crc kubenswrapper[4868]: I1003 13:41:20.678513 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a591139e-7de1-417e-aecb-ccd4c2c105df-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.063337 4868 generic.go:334] "Generic (PLEG): container finished" podID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerID="e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973" exitCode=0
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.063397 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerDied","Data":"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"}
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.063410 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-bqjbv"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.063426 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-bqjbv" event={"ID":"a591139e-7de1-417e-aecb-ccd4c2c105df","Type":"ContainerDied","Data":"848b8ad2646bbc86af8f66b01779762c2e344d72183c2afb9cc700299ee55dce"}
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.063444 4868 scope.go:117] "RemoveContainer" containerID="e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.097355 4868 scope.go:117] "RemoveContainer" containerID="1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.108147 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"]
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.116662 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-bqjbv"]
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.120660 4868 scope.go:117] "RemoveContainer" containerID="b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.158842 4868 scope.go:117] "RemoveContainer" containerID="e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"
Oct 03 13:41:21 crc kubenswrapper[4868]: E1003 13:41:21.159336 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973\": container with ID starting with e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973 not found: ID does not exist" containerID="e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.159411 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973"} err="failed to get container status \"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973\": rpc error: code = NotFound desc = could not find container \"e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973\": container with ID starting with e268c62db7efa7ea41ec9039b7de29059921acb69ce5b56faf149c4e7e2c4973 not found: ID does not exist"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.159438 4868 scope.go:117] "RemoveContainer" containerID="1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91"
Oct 03 13:41:21 crc kubenswrapper[4868]: E1003 13:41:21.159711 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91\": container with ID starting with 1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91 not found: ID does not exist" containerID="1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.159741 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91"} err="failed to get container status \"1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91\": rpc error: code = NotFound desc = could not find container \"1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91\": container with ID starting with 1c3685e82986fa5fb43d79feda3d8fe18849303d452eb4c4cacfb5d156179b91 not found: ID does not exist"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.159764 4868 scope.go:117] "RemoveContainer" containerID="b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264"
Oct 03 13:41:21 crc kubenswrapper[4868]: E1003 13:41:21.160040 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264\": container with ID starting with b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264 not found: ID does not exist" containerID="b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264"
Oct 03 13:41:21 crc kubenswrapper[4868]: I1003 13:41:21.160103 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264"} err="failed to get container status \"b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264\": rpc error: code = NotFound desc = could not find container \"b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264\": container with ID starting with b58ae33bfea553a057a0a2cfa70cf597ae9b2a6463be48ce3656440094a5b264 not found: ID does not exist"
Oct 03 13:41:22 crc kubenswrapper[4868]: I1003 13:41:22.554380 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" path="/var/lib/kubelet/pods/a591139e-7de1-417e-aecb-ccd4c2c105df/volumes"
Oct 03 13:41:29 crc kubenswrapper[4868]: I1003 13:41:29.544462 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:41:29 crc kubenswrapper[4868]: E1003 13:41:29.545890 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:41:40 crc kubenswrapper[4868]: I1003 13:41:40.544345 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:41:40 crc kubenswrapper[4868]: E1003 13:41:40.545027 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:41:55 crc kubenswrapper[4868]: I1003 13:41:55.544211 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:41:55 crc kubenswrapper[4868]: E1003 13:41:55.544913 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:42:08 crc kubenswrapper[4868]: I1003 13:42:08.544539 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:42:08 crc kubenswrapper[4868]: E1003 13:42:08.545397 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:42:20 crc kubenswrapper[4868]: I1003 13:42:20.544683 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:42:20 crc kubenswrapper[4868]: E1003 13:42:20.545427 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:42:33 crc kubenswrapper[4868]: I1003 13:42:33.544328 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:42:33 crc kubenswrapper[4868]: E1003 13:42:33.545218 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:42:48 crc kubenswrapper[4868]: I1003 13:42:48.543758 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:42:48 crc kubenswrapper[4868]: E1003 13:42:48.544553 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:43:03 crc kubenswrapper[4868]: I1003 13:43:03.544633 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:43:03 crc kubenswrapper[4868]: E1003 13:43:03.545496 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:43:15 crc kubenswrapper[4868]: I1003 13:43:15.543924 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:43:15 crc kubenswrapper[4868]: E1003 13:43:15.544872 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:43:29 crc kubenswrapper[4868]: I1003 13:43:29.544082 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:43:29 crc kubenswrapper[4868]: E1003 13:43:29.544983 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:43:40 crc kubenswrapper[4868]: I1003 13:43:40.544655 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:43:40 crc kubenswrapper[4868]: E1003 13:43:40.545382 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:43:54 crc kubenswrapper[4868]: I1003 13:43:54.565488 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:43:54 crc kubenswrapper[4868]: E1003 13:43:54.566616 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:44:08 crc kubenswrapper[4868]: I1003 13:44:08.544945 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:44:08 crc kubenswrapper[4868]: E1003 13:44:08.546317 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:44:19 crc kubenswrapper[4868]: I1003 13:44:19.545070 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:44:19 crc kubenswrapper[4868]: E1003 13:44:19.545860 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 13:44:34 crc kubenswrapper[4868]: I1003 13:44:34.552627 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3"
Oct 03 13:44:35 crc kubenswrapper[4868]: I1003 13:44:35.945619 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54"}
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.180311 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"]
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181785 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="extract-utilities"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181804 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="extract-utilities"
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181849 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="extract-utilities"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181856 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="extract-utilities"
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181869 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="extract-content"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181877 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="extract-content"
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181889 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181895 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181911 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="extract-content"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181917 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="extract-content"
Oct 03 13:45:00 crc kubenswrapper[4868]: E1003 13:45:00.181938 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.181946 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.182187 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="aee3da59-5b6a-4282-8783-e2b1260a93a8" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.182205 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="a591139e-7de1-417e-aecb-ccd4c2c105df" containerName="registry-server"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.183282 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.187881 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.187926 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.209990 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"]
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.348623 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts6lp\" (UniqueName: \"kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.348878 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.349181 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.451091 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.451233 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.451280 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts6lp\" (UniqueName: \"kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.452084 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.458677 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.469324 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts6lp\" (UniqueName: \"kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp\") pod \"collect-profiles-29324985-2gws7\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.515118 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:00 crc kubenswrapper[4868]: I1003 13:45:00.954181 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"]
Oct 03 13:45:01 crc kubenswrapper[4868]: I1003 13:45:01.186694 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7" event={"ID":"394a703b-7249-424d-9366-931e79eaad18","Type":"ContainerStarted","Data":"d8e6f37475851427eda022af6c5f8285750b5aad0c547539c478b602804736d9"}
Oct 03 13:45:01 crc kubenswrapper[4868]: I1003 13:45:01.186750 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7" event={"ID":"394a703b-7249-424d-9366-931e79eaad18","Type":"ContainerStarted","Data":"50a124fb1233f0ecfcdab304fea98456f51c516414a875aa7a2b10d9b9722829"}
Oct 03 13:45:01 crc kubenswrapper[4868]: I1003 13:45:01.207376 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7" podStartSLOduration=1.207356488 podStartE2EDuration="1.207356488s" podCreationTimestamp="2025-10-03 13:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 13:45:01.200878965 +0000 UTC m=+3297.410728051" watchObservedRunningTime="2025-10-03 13:45:01.207356488 +0000 UTC m=+3297.417205544"
Oct 03 13:45:02 crc kubenswrapper[4868]: I1003 13:45:02.197297 4868 generic.go:334] "Generic (PLEG): container finished" podID="394a703b-7249-424d-9366-931e79eaad18" containerID="d8e6f37475851427eda022af6c5f8285750b5aad0c547539c478b602804736d9" exitCode=0
Oct 03 13:45:02 crc kubenswrapper[4868]: I1003 13:45:02.197356 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7" event={"ID":"394a703b-7249-424d-9366-931e79eaad18","Type":"ContainerDied","Data":"d8e6f37475851427eda022af6c5f8285750b5aad0c547539c478b602804736d9"}
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.595342 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.731897 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume\") pod \"394a703b-7249-424d-9366-931e79eaad18\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") "
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.732222 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume\") pod \"394a703b-7249-424d-9366-931e79eaad18\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") "
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.732407 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts6lp\" (UniqueName: \"kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp\") pod \"394a703b-7249-424d-9366-931e79eaad18\" (UID: \"394a703b-7249-424d-9366-931e79eaad18\") "
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.732930 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume" (OuterVolumeSpecName: "config-volume") pod "394a703b-7249-424d-9366-931e79eaad18" (UID: "394a703b-7249-424d-9366-931e79eaad18"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.733468 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/394a703b-7249-424d-9366-931e79eaad18-config-volume\") on node \"crc\" DevicePath \"\""
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.740077 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "394a703b-7249-424d-9366-931e79eaad18" (UID: "394a703b-7249-424d-9366-931e79eaad18"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.740392 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp" (OuterVolumeSpecName: "kube-api-access-ts6lp") pod "394a703b-7249-424d-9366-931e79eaad18" (UID: "394a703b-7249-424d-9366-931e79eaad18"). InnerVolumeSpecName "kube-api-access-ts6lp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.836138 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts6lp\" (UniqueName: \"kubernetes.io/projected/394a703b-7249-424d-9366-931e79eaad18-kube-api-access-ts6lp\") on node \"crc\" DevicePath \"\""
Oct 03 13:45:03 crc kubenswrapper[4868]: I1003 13:45:03.836203 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/394a703b-7249-424d-9366-931e79eaad18-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.267188 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7"
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.267447 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29324985-2gws7" event={"ID":"394a703b-7249-424d-9366-931e79eaad18","Type":"ContainerDied","Data":"50a124fb1233f0ecfcdab304fea98456f51c516414a875aa7a2b10d9b9722829"}
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.267485 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50a124fb1233f0ecfcdab304fea98456f51c516414a875aa7a2b10d9b9722829"
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.288307 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5"]
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.301962 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324940-nj5j5"]
Oct 03 13:45:04 crc kubenswrapper[4868]: I1003 13:45:04.557514 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428" path="/var/lib/kubelet/pods/e9ade3a7-9f02-4bd3-8ee5-4d2c26fc5428/volumes"
Oct 03 13:45:16 crc kubenswrapper[4868]: I1003 13:45:16.373917 4868 scope.go:117] "RemoveContainer" containerID="e71b335f0fb53e1fddffb2fe0a85d3e50c4ce42936b5ef0bd0c9532f0ec21053"
Oct 03 13:47:02 crc kubenswrapper[4868]: I1003 13:47:02.145815 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:47:02 crc kubenswrapper[4868]: I1003 13:47:02.146774 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:47:32 crc kubenswrapper[4868]: I1003 13:47:32.145802 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:47:32 crc kubenswrapper[4868]: I1003 13:47:32.146430 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.575384 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"]
Oct 03 13:47:43 crc kubenswrapper[4868]: E1003 13:47:43.576892 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="394a703b-7249-424d-9366-931e79eaad18" containerName="collect-profiles"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.576916 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="394a703b-7249-424d-9366-931e79eaad18" containerName="collect-profiles"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.577490 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="394a703b-7249-424d-9366-931e79eaad18" containerName="collect-profiles"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.579328 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.589419 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"]
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.664365 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q95g\" (UniqueName: \"kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.664631 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.664932 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.766971 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.767103 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.767247 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q95g\" (UniqueName: \"kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.767828 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn"
Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.767868 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName:
\"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.802539 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q95g\" (UniqueName: \"kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g\") pod \"redhat-operators-59sbn\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:47:43 crc kubenswrapper[4868]: I1003 13:47:43.911861 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:47:44 crc kubenswrapper[4868]: I1003 13:47:44.442234 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"] Oct 03 13:47:44 crc kubenswrapper[4868]: I1003 13:47:44.925817 4868 generic.go:334] "Generic (PLEG): container finished" podID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerID="1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23" exitCode=0 Oct 03 13:47:44 crc kubenswrapper[4868]: I1003 13:47:44.925933 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerDied","Data":"1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23"} Oct 03 13:47:44 crc kubenswrapper[4868]: I1003 13:47:44.926162 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerStarted","Data":"b48441e82d89afaf8c043c7be6d8946efd89e7b0cb8685daef61e25c22da7bf1"} Oct 03 13:47:44 crc kubenswrapper[4868]: I1003 13:47:44.928468 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:47:46 crc kubenswrapper[4868]: I1003 13:47:46.949835 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerStarted","Data":"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9"} Oct 03 13:47:47 crc kubenswrapper[4868]: I1003 13:47:47.974112 4868 generic.go:334] "Generic (PLEG): container finished" podID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerID="5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9" exitCode=0 Oct 03 13:47:47 crc kubenswrapper[4868]: I1003 13:47:47.974229 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerDied","Data":"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9"} Oct 03 13:47:48 crc kubenswrapper[4868]: I1003 13:47:48.991326 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerStarted","Data":"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4"} Oct 03 13:47:49 crc kubenswrapper[4868]: I1003 13:47:49.015777 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-59sbn" podStartSLOduration=2.308235253 podStartE2EDuration="6.015755284s" podCreationTimestamp="2025-10-03 13:47:43 +0000 UTC" 
firstStartedPulling="2025-10-03 13:47:44.928233991 +0000 UTC m=+3461.138083057" lastFinishedPulling="2025-10-03 13:47:48.635754022 +0000 UTC m=+3464.845603088" observedRunningTime="2025-10-03 13:47:49.010640577 +0000 UTC m=+3465.220489643" watchObservedRunningTime="2025-10-03 13:47:49.015755284 +0000 UTC m=+3465.225604350" Oct 03 13:47:53 crc kubenswrapper[4868]: I1003 13:47:53.912071 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:47:53 crc kubenswrapper[4868]: I1003 13:47:53.913203 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:47:54 crc kubenswrapper[4868]: I1003 13:47:54.970981 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-59sbn" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="registry-server" probeResult="failure" output=< Oct 03 13:47:54 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s Oct 03 13:47:54 crc kubenswrapper[4868]: > Oct 03 13:48:02 crc kubenswrapper[4868]: I1003 13:48:02.145342 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:48:02 crc kubenswrapper[4868]: I1003 13:48:02.146247 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:48:02 crc kubenswrapper[4868]: I1003 13:48:02.146313 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:48:02 crc kubenswrapper[4868]: I1003 13:48:02.147373 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:48:02 crc kubenswrapper[4868]: I1003 13:48:02.147448 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54" gracePeriod=600 Oct 03 13:48:03 crc kubenswrapper[4868]: I1003 13:48:03.161256 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54" exitCode=0 Oct 03 13:48:03 crc kubenswrapper[4868]: I1003 13:48:03.161832 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54"} Oct 03 13:48:03 crc kubenswrapper[4868]: I1003 13:48:03.161861 4868 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b"} Oct 03 13:48:03 crc kubenswrapper[4868]: I1003 13:48:03.161879 4868 scope.go:117] "RemoveContainer" containerID="662173a6d8bb1a72367dfcc7aaef0eee7a38b0a7cd63213ec3e3fb254d165ee3" Oct 03 13:48:03 crc kubenswrapper[4868]: I1003 13:48:03.971548 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:48:04 crc kubenswrapper[4868]: I1003 13:48:04.027528 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:48:04 crc kubenswrapper[4868]: I1003 13:48:04.218028 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"] Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.184521 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-59sbn" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="registry-server" containerID="cri-o://461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4" gracePeriod=2 Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.646967 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.664418 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities\") pod \"538bc952-c932-48c8-a0b5-b065e87bb7bc\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.664595 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content\") pod \"538bc952-c932-48c8-a0b5-b065e87bb7bc\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.664873 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2q95g\" (UniqueName: \"kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g\") pod \"538bc952-c932-48c8-a0b5-b065e87bb7bc\" (UID: \"538bc952-c932-48c8-a0b5-b065e87bb7bc\") " Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.665195 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities" (OuterVolumeSpecName: "utilities") pod "538bc952-c932-48c8-a0b5-b065e87bb7bc" (UID: "538bc952-c932-48c8-a0b5-b065e87bb7bc"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.667299 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.673521 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g" (OuterVolumeSpecName: "kube-api-access-2q95g") pod "538bc952-c932-48c8-a0b5-b065e87bb7bc" (UID: "538bc952-c932-48c8-a0b5-b065e87bb7bc"). InnerVolumeSpecName "kube-api-access-2q95g". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.743259 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "538bc952-c932-48c8-a0b5-b065e87bb7bc" (UID: "538bc952-c932-48c8-a0b5-b065e87bb7bc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.768801 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/538bc952-c932-48c8-a0b5-b065e87bb7bc-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:05 crc kubenswrapper[4868]: I1003 13:48:05.768842 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2q95g\" (UniqueName: \"kubernetes.io/projected/538bc952-c932-48c8-a0b5-b065e87bb7bc-kube-api-access-2q95g\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.197296 4868 generic.go:334] "Generic (PLEG): container finished" podID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerID="461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4" exitCode=0 Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.197339 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerDied","Data":"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4"} Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.197375 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-59sbn" event={"ID":"538bc952-c932-48c8-a0b5-b065e87bb7bc","Type":"ContainerDied","Data":"b48441e82d89afaf8c043c7be6d8946efd89e7b0cb8685daef61e25c22da7bf1"} Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.197384 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-59sbn" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.197408 4868 scope.go:117] "RemoveContainer" containerID="461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.222222 4868 scope.go:117] "RemoveContainer" containerID="5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.238356 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"] Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.247726 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-59sbn"] Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.248224 4868 scope.go:117] "RemoveContainer" containerID="1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.297754 4868 scope.go:117] "RemoveContainer" containerID="461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4" Oct 03 13:48:06 crc kubenswrapper[4868]: E1003 13:48:06.298253 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4\": container with ID starting with 461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4 not found: ID does not exist" containerID="461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.298312 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4"} err="failed to get container status \"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4\": rpc error: code = NotFound desc = could not find container \"461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4\": container with ID starting with 461d7a3f501f4487abe764eb23c1bcaa5ec3b8fe5264c5f12d68a0c5e267d7a4 not found: ID does not exist" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.298342 4868 scope.go:117] "RemoveContainer" containerID="5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9" Oct 03 13:48:06 crc kubenswrapper[4868]: E1003 13:48:06.298753 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9\": container with ID starting with 5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9 not found: ID does not exist" containerID="5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.298861 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9"} err="failed to get container status \"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9\": rpc error: code = NotFound desc = could not find container \"5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9\": container with ID starting with 5af5f77f957531b361251c0cc4fc0e5bf80044e1217d5db308266f063c4945a9 not found: ID does not exist" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.299018 4868 scope.go:117] "RemoveContainer" 
containerID="1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23" Oct 03 13:48:06 crc kubenswrapper[4868]: E1003 13:48:06.299578 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23\": container with ID starting with 1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23 not found: ID does not exist" containerID="1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.299642 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23"} err="failed to get container status \"1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23\": rpc error: code = NotFound desc = could not find container \"1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23\": container with ID starting with 1eecea3ac55612af2e8bdffc8440f75759f8ae5453cef37ccc6899da1e333a23 not found: ID does not exist" Oct 03 13:48:06 crc kubenswrapper[4868]: I1003 13:48:06.559546 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" path="/var/lib/kubelet/pods/538bc952-c932-48c8-a0b5-b065e87bb7bc/volumes" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.895985 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:41 crc kubenswrapper[4868]: E1003 13:48:41.897583 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="registry-server" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.897605 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="registry-server" Oct 03 13:48:41 crc kubenswrapper[4868]: E1003 13:48:41.897657 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="extract-content" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.897666 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="extract-content" Oct 03 13:48:41 crc kubenswrapper[4868]: E1003 13:48:41.897693 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="extract-utilities" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.897700 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="extract-utilities" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.898132 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="538bc952-c932-48c8-a0b5-b065e87bb7bc" containerName="registry-server" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.901006 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:41 crc kubenswrapper[4868]: I1003 13:48:41.924358 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.074422 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.074530 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.074821 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vt6hl\" (UniqueName: \"kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.176452 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.176537 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.176650 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vt6hl\" (UniqueName: \"kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.177240 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.177246 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.210642 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-vt6hl\" (UniqueName: \"kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl\") pod \"certified-operators-mxflj\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.245137 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:42 crc kubenswrapper[4868]: I1003 13:48:42.909543 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:43 crc kubenswrapper[4868]: I1003 13:48:43.620751 4868 generic.go:334] "Generic (PLEG): container finished" podID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerID="ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520" exitCode=0 Oct 03 13:48:43 crc kubenswrapper[4868]: I1003 13:48:43.620958 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerDied","Data":"ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520"} Oct 03 13:48:43 crc kubenswrapper[4868]: I1003 13:48:43.621148 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerStarted","Data":"ebefabadbbaed1f01ddfd41cca2ceb2751bc9ac4c8841bb9bcbd8193dfab59ee"} Oct 03 13:48:45 crc kubenswrapper[4868]: I1003 13:48:45.639658 4868 generic.go:334] "Generic (PLEG): container finished" podID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerID="8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2" exitCode=0 Oct 03 13:48:45 crc kubenswrapper[4868]: I1003 13:48:45.639790 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerDied","Data":"8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2"} Oct 03 13:48:46 crc kubenswrapper[4868]: I1003 13:48:46.652855 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerStarted","Data":"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57"} Oct 03 13:48:46 crc kubenswrapper[4868]: I1003 13:48:46.675853 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mxflj" podStartSLOduration=2.988895045 podStartE2EDuration="5.67583435s" podCreationTimestamp="2025-10-03 13:48:41 +0000 UTC" firstStartedPulling="2025-10-03 13:48:43.623224615 +0000 UTC m=+3519.833073681" lastFinishedPulling="2025-10-03 13:48:46.31016392 +0000 UTC m=+3522.520012986" observedRunningTime="2025-10-03 13:48:46.673512577 +0000 UTC m=+3522.883361643" watchObservedRunningTime="2025-10-03 13:48:46.67583435 +0000 UTC m=+3522.885683416" Oct 03 13:48:52 crc kubenswrapper[4868]: I1003 13:48:52.246251 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:52 crc kubenswrapper[4868]: I1003 13:48:52.246899 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:52 crc kubenswrapper[4868]: I1003 13:48:52.304106 4868 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:52 crc kubenswrapper[4868]: I1003 13:48:52.750461 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:52 crc kubenswrapper[4868]: I1003 13:48:52.800487 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:54 crc kubenswrapper[4868]: I1003 13:48:54.742289 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mxflj" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="registry-server" containerID="cri-o://6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57" gracePeriod=2 Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.198725 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.349323 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content\") pod \"18130cc4-02c3-4132-8fbd-549fa490f4b5\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.349564 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities\") pod \"18130cc4-02c3-4132-8fbd-549fa490f4b5\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.349618 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt6hl\" (UniqueName: \"kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl\") pod \"18130cc4-02c3-4132-8fbd-549fa490f4b5\" (UID: \"18130cc4-02c3-4132-8fbd-549fa490f4b5\") " Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.350860 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities" (OuterVolumeSpecName: "utilities") pod "18130cc4-02c3-4132-8fbd-549fa490f4b5" (UID: "18130cc4-02c3-4132-8fbd-549fa490f4b5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.357824 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl" (OuterVolumeSpecName: "kube-api-access-vt6hl") pod "18130cc4-02c3-4132-8fbd-549fa490f4b5" (UID: "18130cc4-02c3-4132-8fbd-549fa490f4b5"). InnerVolumeSpecName "kube-api-access-vt6hl". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.404492 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18130cc4-02c3-4132-8fbd-549fa490f4b5" (UID: "18130cc4-02c3-4132-8fbd-549fa490f4b5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.452023 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.452100 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt6hl\" (UniqueName: \"kubernetes.io/projected/18130cc4-02c3-4132-8fbd-549fa490f4b5-kube-api-access-vt6hl\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.452117 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18130cc4-02c3-4132-8fbd-549fa490f4b5-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.772513 4868 generic.go:334] "Generic (PLEG): container finished" podID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerID="6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57" exitCode=0 Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.772587 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerDied","Data":"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57"} Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.772634 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mxflj" event={"ID":"18130cc4-02c3-4132-8fbd-549fa490f4b5","Type":"ContainerDied","Data":"ebefabadbbaed1f01ddfd41cca2ceb2751bc9ac4c8841bb9bcbd8193dfab59ee"} Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.772634 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mxflj" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.772662 4868 scope.go:117] "RemoveContainer" containerID="6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.877867 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.878435 4868 scope.go:117] "RemoveContainer" containerID="8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.896641 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mxflj"] Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.913373 4868 scope.go:117] "RemoveContainer" containerID="ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.963319 4868 scope.go:117] "RemoveContainer" containerID="6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57" Oct 03 13:48:55 crc kubenswrapper[4868]: E1003 13:48:55.963763 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57\": container with ID starting with 6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57 not found: ID does not exist" containerID="6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.963800 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57"} err="failed to get container status \"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57\": rpc error: code = NotFound desc = could not find container \"6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57\": container with ID starting with 6d40da87268a6d44c0fed08f7ebc1d8097fcbdf596372ac7186bbb6ad5af9f57 not found: ID does not exist" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.963821 4868 scope.go:117] "RemoveContainer" containerID="8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2" Oct 03 13:48:55 crc kubenswrapper[4868]: E1003 13:48:55.964079 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2\": container with ID starting with 8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2 not found: ID does not exist" containerID="8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.964109 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2"} err="failed to get container status \"8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2\": rpc error: code = NotFound desc = could not find container \"8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2\": container with ID starting with 8c9b0f5632d40f3cdacf4b2ad3301a8d1ac381f316880de66fb16b913a3603b2 not found: ID does not exist" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.964124 4868 scope.go:117] "RemoveContainer" 
containerID="ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520" Oct 03 13:48:55 crc kubenswrapper[4868]: E1003 13:48:55.964319 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520\": container with ID starting with ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520 not found: ID does not exist" containerID="ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520" Oct 03 13:48:55 crc kubenswrapper[4868]: I1003 13:48:55.964342 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520"} err="failed to get container status \"ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520\": rpc error: code = NotFound desc = could not find container \"ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520\": container with ID starting with ab73cafecdbf4d0e98402351bca4cfb2879eed19e73a78873ea088f942a3c520 not found: ID does not exist" Oct 03 13:48:56 crc kubenswrapper[4868]: I1003 13:48:56.555572 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" path="/var/lib/kubelet/pods/18130cc4-02c3-4132-8fbd-549fa490f4b5/volumes" Oct 03 13:50:02 crc kubenswrapper[4868]: I1003 13:50:02.145032 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:50:02 crc kubenswrapper[4868]: I1003 13:50:02.145945 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:50:32 crc kubenswrapper[4868]: I1003 13:50:32.145372 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:50:32 crc kubenswrapper[4868]: I1003 13:50:32.145869 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:51:02 crc kubenswrapper[4868]: I1003 13:51:02.146133 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:51:02 crc kubenswrapper[4868]: I1003 13:51:02.146866 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:51:02 crc kubenswrapper[4868]: I1003 13:51:02.146937 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 13:51:02 crc kubenswrapper[4868]: I1003 13:51:02.148276 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 13:51:02 crc kubenswrapper[4868]: I1003 13:51:02.148397 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" gracePeriod=600 Oct 03 13:51:02 crc kubenswrapper[4868]: E1003 13:51:02.270933 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:51:03 crc kubenswrapper[4868]: I1003 13:51:03.010918 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" exitCode=0 Oct 03 13:51:03 crc kubenswrapper[4868]: I1003 13:51:03.010965 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b"} Oct 03 13:51:03 crc kubenswrapper[4868]: I1003 13:51:03.011000 4868 scope.go:117] "RemoveContainer" containerID="b17b3bf423291b4d2088cb249d28ca3f9225b98b9c046dc8e3bed2d81c3cce54" Oct 03 13:51:03 crc kubenswrapper[4868]: I1003 13:51:03.011584 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:51:03 crc kubenswrapper[4868]: E1003 13:51:03.012008 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:51:16 crc kubenswrapper[4868]: I1003 13:51:16.544762 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:51:16 crc kubenswrapper[4868]: E1003 13:51:16.545694 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:51:30 crc kubenswrapper[4868]: I1003 13:51:30.544810 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:51:30 crc kubenswrapper[4868]: E1003 13:51:30.545769 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:51:45 crc kubenswrapper[4868]: I1003 13:51:45.545196 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:51:45 crc kubenswrapper[4868]: E1003 13:51:45.546446 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:51:59 crc kubenswrapper[4868]: I1003 13:51:59.545439 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:51:59 crc kubenswrapper[4868]: E1003 13:51:59.547106 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:52:11 crc kubenswrapper[4868]: I1003 13:52:11.544719 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:52:11 crc kubenswrapper[4868]: E1003 13:52:11.545922 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.540922 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:23 crc kubenswrapper[4868]: E1003 13:52:23.541918 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="registry-server" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.541933 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="registry-server" Oct 03 13:52:23 crc kubenswrapper[4868]: E1003 13:52:23.541953 4868 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="extract-utilities" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.541960 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="extract-utilities" Oct 03 13:52:23 crc kubenswrapper[4868]: E1003 13:52:23.541980 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="extract-content" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.541987 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="extract-content" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.542193 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="18130cc4-02c3-4132-8fbd-549fa490f4b5" containerName="registry-server" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.544346 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.554128 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.609409 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.609865 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lc97h\" (UniqueName: \"kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.610117 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.711874 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.711937 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.712006 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lc97h\" (UniqueName: 
\"kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.712590 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.712719 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.730764 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lc97h\" (UniqueName: \"kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h\") pod \"community-operators-jtv5b\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:23 crc kubenswrapper[4868]: I1003 13:52:23.870029 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:24 crc kubenswrapper[4868]: I1003 13:52:24.403555 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:24 crc kubenswrapper[4868]: I1003 13:52:24.553573 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:52:24 crc kubenswrapper[4868]: E1003 13:52:24.554585 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:52:24 crc kubenswrapper[4868]: I1003 13:52:24.830976 4868 generic.go:334] "Generic (PLEG): container finished" podID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerID="6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7" exitCode=0 Oct 03 13:52:24 crc kubenswrapper[4868]: I1003 13:52:24.831039 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerDied","Data":"6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7"} Oct 03 13:52:24 crc kubenswrapper[4868]: I1003 13:52:24.831106 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerStarted","Data":"24ce8b6d47f47c929b062fad67c551a24a80d53ab29dabd94d1179187862595b"} Oct 03 13:52:26 crc kubenswrapper[4868]: I1003 13:52:26.850628 4868 generic.go:334] "Generic (PLEG): container finished" podID="b0ee712b-3136-4d6c-a8ee-0a136a025480" 
containerID="8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533" exitCode=0 Oct 03 13:52:26 crc kubenswrapper[4868]: I1003 13:52:26.850867 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerDied","Data":"8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533"} Oct 03 13:52:27 crc kubenswrapper[4868]: I1003 13:52:27.861998 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerStarted","Data":"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d"} Oct 03 13:52:27 crc kubenswrapper[4868]: I1003 13:52:27.890266 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-jtv5b" podStartSLOduration=2.236906762 podStartE2EDuration="4.890244669s" podCreationTimestamp="2025-10-03 13:52:23 +0000 UTC" firstStartedPulling="2025-10-03 13:52:24.833781882 +0000 UTC m=+3741.043630948" lastFinishedPulling="2025-10-03 13:52:27.487119789 +0000 UTC m=+3743.696968855" observedRunningTime="2025-10-03 13:52:27.884539797 +0000 UTC m=+3744.094388873" watchObservedRunningTime="2025-10-03 13:52:27.890244669 +0000 UTC m=+3744.100093735" Oct 03 13:52:33 crc kubenswrapper[4868]: I1003 13:52:33.871136 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:33 crc kubenswrapper[4868]: I1003 13:52:33.872127 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:33 crc kubenswrapper[4868]: I1003 13:52:33.927307 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:33 crc kubenswrapper[4868]: I1003 13:52:33.989734 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:34 crc kubenswrapper[4868]: I1003 13:52:34.168892 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:35 crc kubenswrapper[4868]: I1003 13:52:35.934022 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-jtv5b" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="registry-server" containerID="cri-o://6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d" gracePeriod=2 Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.372119 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.474693 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities\") pod \"b0ee712b-3136-4d6c-a8ee-0a136a025480\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.475269 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content\") pod \"b0ee712b-3136-4d6c-a8ee-0a136a025480\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.475382 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lc97h\" (UniqueName: \"kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h\") pod \"b0ee712b-3136-4d6c-a8ee-0a136a025480\" (UID: \"b0ee712b-3136-4d6c-a8ee-0a136a025480\") " Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.475921 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities" (OuterVolumeSpecName: "utilities") pod "b0ee712b-3136-4d6c-a8ee-0a136a025480" (UID: "b0ee712b-3136-4d6c-a8ee-0a136a025480"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.481175 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h" (OuterVolumeSpecName: "kube-api-access-lc97h") pod "b0ee712b-3136-4d6c-a8ee-0a136a025480" (UID: "b0ee712b-3136-4d6c-a8ee-0a136a025480"). InnerVolumeSpecName "kube-api-access-lc97h". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.578422 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lc97h\" (UniqueName: \"kubernetes.io/projected/b0ee712b-3136-4d6c-a8ee-0a136a025480-kube-api-access-lc97h\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.578455 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.946454 4868 generic.go:334] "Generic (PLEG): container finished" podID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerID="6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d" exitCode=0 Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.946527 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerDied","Data":"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d"} Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.946547 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-jtv5b" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.946567 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-jtv5b" event={"ID":"b0ee712b-3136-4d6c-a8ee-0a136a025480","Type":"ContainerDied","Data":"24ce8b6d47f47c929b062fad67c551a24a80d53ab29dabd94d1179187862595b"} Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.946632 4868 scope.go:117] "RemoveContainer" containerID="6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d" Oct 03 13:52:36 crc kubenswrapper[4868]: I1003 13:52:36.965856 4868 scope.go:117] "RemoveContainer" containerID="8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.069821 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b0ee712b-3136-4d6c-a8ee-0a136a025480" (UID: "b0ee712b-3136-4d6c-a8ee-0a136a025480"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.088124 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b0ee712b-3136-4d6c-a8ee-0a136a025480-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.111867 4868 scope.go:117] "RemoveContainer" containerID="6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.131262 4868 scope.go:117] "RemoveContainer" containerID="6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d" Oct 03 13:52:37 crc kubenswrapper[4868]: E1003 13:52:37.131776 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d\": container with ID starting with 6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d not found: ID does not exist" containerID="6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.131835 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d"} err="failed to get container status \"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d\": rpc error: code = NotFound desc = could not find container \"6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d\": container with ID starting with 6bc00812e1df2a691784377cab67477748d814d050bcaacf7fe7a4d2e383ec7d not found: ID does not exist" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.131869 4868 scope.go:117] "RemoveContainer" containerID="8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533" Oct 03 13:52:37 crc kubenswrapper[4868]: E1003 13:52:37.132349 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533\": container with ID starting with 8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533 not found: ID does not exist" containerID="8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533" Oct 03 13:52:37 crc 
kubenswrapper[4868]: I1003 13:52:37.132387 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533"} err="failed to get container status \"8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533\": rpc error: code = NotFound desc = could not find container \"8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533\": container with ID starting with 8c4f6a13be53a07bdb1ce4d501e6fc3811a4c7b070e9cdcb18ec0c87a0b0f533 not found: ID does not exist" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.132412 4868 scope.go:117] "RemoveContainer" containerID="6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7" Oct 03 13:52:37 crc kubenswrapper[4868]: E1003 13:52:37.132650 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7\": container with ID starting with 6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7 not found: ID does not exist" containerID="6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.132673 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7"} err="failed to get container status \"6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7\": rpc error: code = NotFound desc = could not find container \"6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7\": container with ID starting with 6a8512880b7611ba6e99cadf7c3365b89c01d530e4908cb46462d6f9d7b3a0e7 not found: ID does not exist" Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.287147 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:37 crc kubenswrapper[4868]: I1003 13:52:37.295958 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-jtv5b"] Oct 03 13:52:38 crc kubenswrapper[4868]: I1003 13:52:38.555892 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" path="/var/lib/kubelet/pods/b0ee712b-3136-4d6c-a8ee-0a136a025480/volumes" Oct 03 13:52:39 crc kubenswrapper[4868]: I1003 13:52:39.543625 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:52:39 crc kubenswrapper[4868]: E1003 13:52:39.544192 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:52:53 crc kubenswrapper[4868]: I1003 13:52:53.543966 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:52:53 crc kubenswrapper[4868]: E1003 13:52:53.545835 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:53:05 crc kubenswrapper[4868]: I1003 13:53:05.544509 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:53:05 crc kubenswrapper[4868]: E1003 13:53:05.545368 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.007697 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:11 crc kubenswrapper[4868]: E1003 13:53:11.008839 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="registry-server" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.008860 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="registry-server" Oct 03 13:53:11 crc kubenswrapper[4868]: E1003 13:53:11.008884 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="extract-content" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.008893 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="extract-content" Oct 03 13:53:11 crc kubenswrapper[4868]: E1003 13:53:11.008909 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="extract-utilities" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.008917 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="extract-utilities" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.009191 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0ee712b-3136-4d6c-a8ee-0a136a025480" containerName="registry-server" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.011142 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.025989 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.073793 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.073880 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.073940 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-95md7\" (UniqueName: \"kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.175452 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-95md7\" (UniqueName: \"kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.175610 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.175677 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.176393 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.176539 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.197910 4868 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-95md7\" (UniqueName: \"kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7\") pod \"redhat-marketplace-7sndf\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.344044 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:11 crc kubenswrapper[4868]: I1003 13:53:11.836406 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:12 crc kubenswrapper[4868]: I1003 13:53:12.262780 4868 generic.go:334] "Generic (PLEG): container finished" podID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerID="31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362" exitCode=0 Oct 03 13:53:12 crc kubenswrapper[4868]: I1003 13:53:12.262983 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerDied","Data":"31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362"} Oct 03 13:53:12 crc kubenswrapper[4868]: I1003 13:53:12.263120 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerStarted","Data":"526c2ab40d1ba57567e4bf397c9a1dba45fbeda6b99947ebd7ac8c816c1d0dcd"} Oct 03 13:53:12 crc kubenswrapper[4868]: I1003 13:53:12.265001 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:53:13 crc kubenswrapper[4868]: I1003 13:53:13.274150 4868 generic.go:334] "Generic (PLEG): container finished" podID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerID="04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762" exitCode=0 Oct 03 13:53:13 crc kubenswrapper[4868]: I1003 13:53:13.274255 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerDied","Data":"04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762"} Oct 03 13:53:14 crc kubenswrapper[4868]: I1003 13:53:14.286235 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerStarted","Data":"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f"} Oct 03 13:53:14 crc kubenswrapper[4868]: I1003 13:53:14.306399 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7sndf" podStartSLOduration=2.885650765 podStartE2EDuration="4.306374941s" podCreationTimestamp="2025-10-03 13:53:10 +0000 UTC" firstStartedPulling="2025-10-03 13:53:12.264724886 +0000 UTC m=+3788.474573952" lastFinishedPulling="2025-10-03 13:53:13.685449062 +0000 UTC m=+3789.895298128" observedRunningTime="2025-10-03 13:53:14.303391271 +0000 UTC m=+3790.513240357" watchObservedRunningTime="2025-10-03 13:53:14.306374941 +0000 UTC m=+3790.516224007" Oct 03 13:53:18 crc kubenswrapper[4868]: I1003 13:53:18.543996 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:53:18 crc kubenswrapper[4868]: E1003 13:53:18.544693 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed 
to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:53:21 crc kubenswrapper[4868]: I1003 13:53:21.344310 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:21 crc kubenswrapper[4868]: I1003 13:53:21.344631 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:21 crc kubenswrapper[4868]: I1003 13:53:21.389238 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:21 crc kubenswrapper[4868]: I1003 13:53:21.434770 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:21 crc kubenswrapper[4868]: I1003 13:53:21.624487 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.377330 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7sndf" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="registry-server" containerID="cri-o://e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f" gracePeriod=2 Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.865260 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.923916 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content\") pod \"df9810db-8a8a-4e97-aaa9-76df725b3412\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.924185 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities\") pod \"df9810db-8a8a-4e97-aaa9-76df725b3412\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.924245 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-95md7\" (UniqueName: \"kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7\") pod \"df9810db-8a8a-4e97-aaa9-76df725b3412\" (UID: \"df9810db-8a8a-4e97-aaa9-76df725b3412\") " Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.925992 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities" (OuterVolumeSpecName: "utilities") pod "df9810db-8a8a-4e97-aaa9-76df725b3412" (UID: "df9810db-8a8a-4e97-aaa9-76df725b3412"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:23 crc kubenswrapper[4868]: I1003 13:53:23.931452 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7" (OuterVolumeSpecName: "kube-api-access-95md7") pod "df9810db-8a8a-4e97-aaa9-76df725b3412" (UID: "df9810db-8a8a-4e97-aaa9-76df725b3412"). InnerVolumeSpecName "kube-api-access-95md7". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.026695 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.026728 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-95md7\" (UniqueName: \"kubernetes.io/projected/df9810db-8a8a-4e97-aaa9-76df725b3412-kube-api-access-95md7\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.124307 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "df9810db-8a8a-4e97-aaa9-76df725b3412" (UID: "df9810db-8a8a-4e97-aaa9-76df725b3412"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.128785 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/df9810db-8a8a-4e97-aaa9-76df725b3412-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.388040 4868 generic.go:334] "Generic (PLEG): container finished" podID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerID="e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f" exitCode=0 Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.388118 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerDied","Data":"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f"} Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.388183 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7sndf" event={"ID":"df9810db-8a8a-4e97-aaa9-76df725b3412","Type":"ContainerDied","Data":"526c2ab40d1ba57567e4bf397c9a1dba45fbeda6b99947ebd7ac8c816c1d0dcd"} Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.388183 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7sndf" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.388197 4868 scope.go:117] "RemoveContainer" containerID="e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.408152 4868 scope.go:117] "RemoveContainer" containerID="04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.427120 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.448045 4868 scope.go:117] "RemoveContainer" containerID="31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.456840 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7sndf"] Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.487768 4868 scope.go:117] "RemoveContainer" containerID="e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f" Oct 03 13:53:24 crc kubenswrapper[4868]: E1003 13:53:24.492498 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f\": container with ID starting with e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f not found: ID does not exist" containerID="e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.492549 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f"} err="failed to get container status \"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f\": rpc error: code = NotFound desc = could not find container \"e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f\": container with ID starting with e67747208950ad814bd8c8351876397c13f02b7836ae16aab1ae07a004f3432f not found: ID does not exist" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.492968 4868 scope.go:117] "RemoveContainer" containerID="04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762" Oct 03 13:53:24 crc kubenswrapper[4868]: E1003 13:53:24.493792 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762\": container with ID starting with 04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762 not found: ID does not exist" containerID="04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.493821 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762"} err="failed to get container status \"04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762\": rpc error: code = NotFound desc = could not find container \"04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762\": container with ID starting with 04bd108aa43f78c320270d6f754b7a645ed1fbd52e4e901e3f95055ea4348762 not found: ID does not exist" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.493841 4868 scope.go:117] "RemoveContainer" 
containerID="31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362" Oct 03 13:53:24 crc kubenswrapper[4868]: E1003 13:53:24.494108 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362\": container with ID starting with 31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362 not found: ID does not exist" containerID="31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.494138 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362"} err="failed to get container status \"31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362\": rpc error: code = NotFound desc = could not find container \"31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362\": container with ID starting with 31392eedeca41cc5efff4e0716045f3113e8657b018a18e0118801ec13206362 not found: ID does not exist" Oct 03 13:53:24 crc kubenswrapper[4868]: I1003 13:53:24.557912 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" path="/var/lib/kubelet/pods/df9810db-8a8a-4e97-aaa9-76df725b3412/volumes" Oct 03 13:53:33 crc kubenswrapper[4868]: I1003 13:53:33.543806 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:53:33 crc kubenswrapper[4868]: E1003 13:53:33.544475 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:53:47 crc kubenswrapper[4868]: I1003 13:53:47.544579 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:53:47 crc kubenswrapper[4868]: E1003 13:53:47.545279 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:54:00 crc kubenswrapper[4868]: I1003 13:54:00.544955 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:54:00 crc kubenswrapper[4868]: E1003 13:54:00.546355 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:54:11 crc kubenswrapper[4868]: I1003 13:54:11.544002 4868 scope.go:117] "RemoveContainer" 
containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:54:11 crc kubenswrapper[4868]: E1003 13:54:11.544813 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:54:25 crc kubenswrapper[4868]: I1003 13:54:25.543942 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:54:25 crc kubenswrapper[4868]: E1003 13:54:25.544728 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:54:37 crc kubenswrapper[4868]: I1003 13:54:37.544499 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:54:37 crc kubenswrapper[4868]: E1003 13:54:37.545243 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:54:49 crc kubenswrapper[4868]: I1003 13:54:49.544095 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:54:49 crc kubenswrapper[4868]: E1003 13:54:49.544918 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:55:03 crc kubenswrapper[4868]: I1003 13:55:03.544442 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:55:03 crc kubenswrapper[4868]: E1003 13:55:03.545151 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:55:17 crc kubenswrapper[4868]: I1003 13:55:17.544929 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:55:17 crc kubenswrapper[4868]: E1003 13:55:17.546313 4868 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:55:30 crc kubenswrapper[4868]: I1003 13:55:30.544243 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:55:30 crc kubenswrapper[4868]: E1003 13:55:30.545041 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:55:44 crc kubenswrapper[4868]: I1003 13:55:44.552727 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:55:44 crc kubenswrapper[4868]: E1003 13:55:44.554178 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:55:57 crc kubenswrapper[4868]: I1003 13:55:57.544302 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:55:57 crc kubenswrapper[4868]: E1003 13:55:57.546227 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 13:56:10 crc kubenswrapper[4868]: I1003 13:56:10.545006 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b" Oct 03 13:56:10 crc kubenswrapper[4868]: I1003 13:56:10.950465 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e"} Oct 03 13:58:32 crc kubenswrapper[4868]: I1003 13:58:32.145490 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:58:32 crc kubenswrapper[4868]: I1003 13:58:32.146590 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:59:02 crc kubenswrapper[4868]: I1003 13:59:02.145562 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 13:59:02 crc kubenswrapper[4868]: I1003 13:59:02.146069 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.688291 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"] Oct 03 13:59:04 crc kubenswrapper[4868]: E1003 13:59:04.689090 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="extract-utilities" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.689107 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="extract-utilities" Oct 03 13:59:04 crc kubenswrapper[4868]: E1003 13:59:04.689147 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="registry-server" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.689156 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="registry-server" Oct 03 13:59:04 crc kubenswrapper[4868]: E1003 13:59:04.689178 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="extract-content" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.689185 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="extract-content" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.689430 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="df9810db-8a8a-4e97-aaa9-76df725b3412" containerName="registry-server" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.691110 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.697906 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"] Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.809976 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.810119 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlf4l\" (UniqueName: \"kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.810792 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.912230 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.912329 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlf4l\" (UniqueName: \"kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.912395 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.912851 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.913171 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:04 crc kubenswrapper[4868]: I1003 13:59:04.935134 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-mlf4l\" (UniqueName: \"kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l\") pod \"certified-operators-nv9wg\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:05 crc kubenswrapper[4868]: I1003 13:59:05.012468 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:05 crc kubenswrapper[4868]: I1003 13:59:05.509917 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"] Oct 03 13:59:06 crc kubenswrapper[4868]: I1003 13:59:06.481173 4868 generic.go:334] "Generic (PLEG): container finished" podID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerID="305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6" exitCode=0 Oct 03 13:59:06 crc kubenswrapper[4868]: I1003 13:59:06.481651 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerDied","Data":"305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6"} Oct 03 13:59:06 crc kubenswrapper[4868]: I1003 13:59:06.481964 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerStarted","Data":"aff6803ffaa579ec8fc56a37fcea8ca5d2b53d679183369ee215b79cb55d5012"} Oct 03 13:59:06 crc kubenswrapper[4868]: I1003 13:59:06.484780 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 13:59:07 crc kubenswrapper[4868]: I1003 13:59:07.492365 4868 generic.go:334] "Generic (PLEG): container finished" podID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerID="0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b" exitCode=0 Oct 03 13:59:07 crc kubenswrapper[4868]: I1003 13:59:07.492411 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerDied","Data":"0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b"} Oct 03 13:59:08 crc kubenswrapper[4868]: I1003 13:59:08.502572 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerStarted","Data":"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0"} Oct 03 13:59:08 crc kubenswrapper[4868]: I1003 13:59:08.526425 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-nv9wg" podStartSLOduration=2.965205231 podStartE2EDuration="4.526408322s" podCreationTimestamp="2025-10-03 13:59:04 +0000 UTC" firstStartedPulling="2025-10-03 13:59:06.484440774 +0000 UTC m=+4142.694289840" lastFinishedPulling="2025-10-03 13:59:08.045643865 +0000 UTC m=+4144.255492931" observedRunningTime="2025-10-03 13:59:08.518278018 +0000 UTC m=+4144.728127084" watchObservedRunningTime="2025-10-03 13:59:08.526408322 +0000 UTC m=+4144.736257388" Oct 03 13:59:15 crc kubenswrapper[4868]: I1003 13:59:15.013002 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:15 crc kubenswrapper[4868]: I1003 13:59:15.014151 4868 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:15 crc kubenswrapper[4868]: I1003 13:59:15.055928 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:15 crc kubenswrapper[4868]: I1003 13:59:15.629387 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:15 crc kubenswrapper[4868]: I1003 13:59:15.680908 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"] Oct 03 13:59:17 crc kubenswrapper[4868]: I1003 13:59:17.601619 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-nv9wg" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="registry-server" containerID="cri-o://338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0" gracePeriod=2 Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.118804 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-nv9wg" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.184025 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities\") pod \"7bcff761-b53e-4e93-b24b-edd828aae8cd\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.184100 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlf4l\" (UniqueName: \"kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l\") pod \"7bcff761-b53e-4e93-b24b-edd828aae8cd\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.184272 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content\") pod \"7bcff761-b53e-4e93-b24b-edd828aae8cd\" (UID: \"7bcff761-b53e-4e93-b24b-edd828aae8cd\") " Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.185246 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities" (OuterVolumeSpecName: "utilities") pod "7bcff761-b53e-4e93-b24b-edd828aae8cd" (UID: "7bcff761-b53e-4e93-b24b-edd828aae8cd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.192456 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l" (OuterVolumeSpecName: "kube-api-access-mlf4l") pod "7bcff761-b53e-4e93-b24b-edd828aae8cd" (UID: "7bcff761-b53e-4e93-b24b-edd828aae8cd"). InnerVolumeSpecName "kube-api-access-mlf4l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.286133 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.286172 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlf4l\" (UniqueName: \"kubernetes.io/projected/7bcff761-b53e-4e93-b24b-edd828aae8cd-kube-api-access-mlf4l\") on node \"crc\" DevicePath \"\"" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.390995 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7bcff761-b53e-4e93-b24b-edd828aae8cd" (UID: "7bcff761-b53e-4e93-b24b-edd828aae8cd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.489193 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7bcff761-b53e-4e93-b24b-edd828aae8cd-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.612752 4868 generic.go:334] "Generic (PLEG): container finished" podID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerID="338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0" exitCode=0 Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.612796 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerDied","Data":"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0"} Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.612828 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-nv9wg" event={"ID":"7bcff761-b53e-4e93-b24b-edd828aae8cd","Type":"ContainerDied","Data":"aff6803ffaa579ec8fc56a37fcea8ca5d2b53d679183369ee215b79cb55d5012"} Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.612846 4868 scope.go:117] "RemoveContainer" containerID="338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0" Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.612885 4868 util.go:48] "No ready sandbox for pod can be found. 
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.655136 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"]
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.659953 4868 scope.go:117] "RemoveContainer" containerID="0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.664355 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-nv9wg"]
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.687622 4868 scope.go:117] "RemoveContainer" containerID="305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.746034 4868 scope.go:117] "RemoveContainer" containerID="338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0"
Oct 03 13:59:18 crc kubenswrapper[4868]: E1003 13:59:18.746524 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0\": container with ID starting with 338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0 not found: ID does not exist" containerID="338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.746548 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0"} err="failed to get container status \"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0\": rpc error: code = NotFound desc = could not find container \"338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0\": container with ID starting with 338551078b2250465a2a56e617a41a489b0f1518f2e83da515adf95f7c0c66a0 not found: ID does not exist"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.746567 4868 scope.go:117] "RemoveContainer" containerID="0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b"
Oct 03 13:59:18 crc kubenswrapper[4868]: E1003 13:59:18.746945 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b\": container with ID starting with 0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b not found: ID does not exist" containerID="0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.746962 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b"} err="failed to get container status \"0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b\": rpc error: code = NotFound desc = could not find container \"0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b\": container with ID starting with 0987fae0b9abcef9b15ed1e1ae7ccb1427a7b0db4c388fae3073fc2080c0ac8b not found: ID does not exist"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.746974 4868 scope.go:117] "RemoveContainer" containerID="305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6"
Oct 03 13:59:18 crc kubenswrapper[4868]: E1003 13:59:18.747208 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6\": container with ID starting with 305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6 not found: ID does not exist" containerID="305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6"
Oct 03 13:59:18 crc kubenswrapper[4868]: I1003 13:59:18.747223 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6"} err="failed to get container status \"305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6\": rpc error: code = NotFound desc = could not find container \"305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6\": container with ID starting with 305618cf609fd58db81bbb7470448a0879600a513ffc7139f27a0e8c1660bdf6 not found: ID does not exist"
Oct 03 13:59:20 crc kubenswrapper[4868]: I1003 13:59:20.555381 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" path="/var/lib/kubelet/pods/7bcff761-b53e-4e93-b24b-edd828aae8cd/volumes"
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.146227 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.146826 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.146889 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.147790 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.147863 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e" gracePeriod=600
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.731772 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e" exitCode=0
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.731890 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e"}
Oct 03 13:59:32 crc kubenswrapper[4868]: I1003 13:59:32.732323 4868 scope.go:117] "RemoveContainer" containerID="bdd164257c6b86f6aacfc79ddf553421e7787515f02879206c290ba67a70e33b"
Oct 03 13:59:33 crc kubenswrapper[4868]: I1003 13:59:33.743508 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264"}
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.156968 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"]
Oct 03 14:00:00 crc kubenswrapper[4868]: E1003 14:00:00.158897 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="extract-utilities"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.158978 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="extract-utilities"
Oct 03 14:00:00 crc kubenswrapper[4868]: E1003 14:00:00.159048 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="extract-content"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.159141 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="extract-content"
Oct 03 14:00:00 crc kubenswrapper[4868]: E1003 14:00:00.159218 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="registry-server"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.159285 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="registry-server"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.159597 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bcff761-b53e-4e93-b24b-edd828aae8cd" containerName="registry-server"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.160547 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.163492 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.163574 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.169492 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"]
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.181908 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.181954 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9hgq2\" (UniqueName: \"kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.182068 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.285347 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.285708 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9hgq2\" (UniqueName: \"kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.285882 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.287677 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.292819 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.303100 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9hgq2\" (UniqueName: \"kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2\") pod \"collect-profiles-29325000-vfvs9\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.543101 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.976681 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"]
Oct 03 14:00:00 crc kubenswrapper[4868]: I1003 14:00:00.996702 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9" event={"ID":"70ee688e-8328-4711-83aa-6853928d0be2","Type":"ContainerStarted","Data":"6e3559102565e1fff1e1db0d5fc22891378f55284f7f414b1f152ba31bda7d2f"}
Oct 03 14:00:02 crc kubenswrapper[4868]: I1003 14:00:02.007681 4868 generic.go:334] "Generic (PLEG): container finished" podID="70ee688e-8328-4711-83aa-6853928d0be2" containerID="bc440d22cf043f6551132dd80f19a9c335ca13fbc443b1e24f86163586784584" exitCode=0
Oct 03 14:00:02 crc kubenswrapper[4868]: I1003 14:00:02.007773 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9" event={"ID":"70ee688e-8328-4711-83aa-6853928d0be2","Type":"ContainerDied","Data":"bc440d22cf043f6551132dd80f19a9c335ca13fbc443b1e24f86163586784584"}
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.343090 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.447962 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume\") pod \"70ee688e-8328-4711-83aa-6853928d0be2\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") "
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.448041 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9hgq2\" (UniqueName: \"kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2\") pod \"70ee688e-8328-4711-83aa-6853928d0be2\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") "
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.448126 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume\") pod \"70ee688e-8328-4711-83aa-6853928d0be2\" (UID: \"70ee688e-8328-4711-83aa-6853928d0be2\") "
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.448751 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume" (OuterVolumeSpecName: "config-volume") pod "70ee688e-8328-4711-83aa-6853928d0be2" (UID: "70ee688e-8328-4711-83aa-6853928d0be2"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.463632 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "70ee688e-8328-4711-83aa-6853928d0be2" (UID: "70ee688e-8328-4711-83aa-6853928d0be2"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.464064 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2" (OuterVolumeSpecName: "kube-api-access-9hgq2") pod "70ee688e-8328-4711-83aa-6853928d0be2" (UID: "70ee688e-8328-4711-83aa-6853928d0be2"). InnerVolumeSpecName "kube-api-access-9hgq2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.551367 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/70ee688e-8328-4711-83aa-6853928d0be2-secret-volume\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.551404 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9hgq2\" (UniqueName: \"kubernetes.io/projected/70ee688e-8328-4711-83aa-6853928d0be2-kube-api-access-9hgq2\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:03 crc kubenswrapper[4868]: I1003 14:00:03.551414 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/70ee688e-8328-4711-83aa-6853928d0be2-config-volume\") on node \"crc\" DevicePath \"\""
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.034677 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9" event={"ID":"70ee688e-8328-4711-83aa-6853928d0be2","Type":"ContainerDied","Data":"6e3559102565e1fff1e1db0d5fc22891378f55284f7f414b1f152ba31bda7d2f"}
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.035006 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e3559102565e1fff1e1db0d5fc22891378f55284f7f414b1f152ba31bda7d2f"
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.034762 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325000-vfvs9"
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.416226 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"]
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.423992 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324955-gnv92"]
Oct 03 14:00:04 crc kubenswrapper[4868]: I1003 14:00:04.558886 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="767b70bd-7d85-4b49-b429-1fad0a5eac9c" path="/var/lib/kubelet/pods/767b70bd-7d85-4b49-b429-1fad0a5eac9c/volumes"
Oct 03 14:00:16 crc kubenswrapper[4868]: I1003 14:00:16.782142 4868 scope.go:117] "RemoveContainer" containerID="e928ebf1948250fd3d2727dc59548a5395912da72e1496903b1845e5a550e404"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.149663 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29325001-w7zlw"]
Oct 03 14:01:00 crc kubenswrapper[4868]: E1003 14:01:00.150791 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70ee688e-8328-4711-83aa-6853928d0be2" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.150810 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="70ee688e-8328-4711-83aa-6853928d0be2" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.151081 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="70ee688e-8328-4711-83aa-6853928d0be2" containerName="collect-profiles"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.151833 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.161437 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325001-w7zlw"]
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.285752 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.285922 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.285947 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s5tcx\" (UniqueName: \"kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.285986 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.388087 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.388241 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.388292 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s5tcx\" (UniqueName: \"kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.388343 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.395091 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.395277 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.405024 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.411267 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s5tcx\" (UniqueName: \"kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx\") pod \"keystone-cron-29325001-w7zlw\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") " pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.478918 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:00 crc kubenswrapper[4868]: I1003 14:01:00.916715 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29325001-w7zlw"]
Oct 03 14:01:01 crc kubenswrapper[4868]: I1003 14:01:01.552040 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-w7zlw" event={"ID":"246879f2-e7ef-4506-ac99-7a5bf0bcbe62","Type":"ContainerStarted","Data":"4a41673b300bc401868fb2a6ad92447e07d3572f7a13da62ab11924e5a093a90"}
Oct 03 14:01:01 crc kubenswrapper[4868]: I1003 14:01:01.552348 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-w7zlw" event={"ID":"246879f2-e7ef-4506-ac99-7a5bf0bcbe62","Type":"ContainerStarted","Data":"a505eb847794aafd8c0c2d578c913d437508d42e6c42c92c1cf540890aefc62f"}
Oct 03 14:01:01 crc kubenswrapper[4868]: I1003 14:01:01.572489 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29325001-w7zlw" podStartSLOduration=1.572467612 podStartE2EDuration="1.572467612s" podCreationTimestamp="2025-10-03 14:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:01:01.571857636 +0000 UTC m=+4257.781706712" watchObservedRunningTime="2025-10-03 14:01:01.572467612 +0000 UTC m=+4257.782316678"
Oct 03 14:01:03 crc kubenswrapper[4868]: I1003 14:01:03.568185 4868 generic.go:334] "Generic (PLEG): container finished" podID="246879f2-e7ef-4506-ac99-7a5bf0bcbe62" containerID="4a41673b300bc401868fb2a6ad92447e07d3572f7a13da62ab11924e5a093a90" exitCode=0
Oct 03 14:01:03 crc kubenswrapper[4868]: I1003 14:01:03.568284 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-w7zlw" event={"ID":"246879f2-e7ef-4506-ac99-7a5bf0bcbe62","Type":"ContainerDied","Data":"4a41673b300bc401868fb2a6ad92447e07d3572f7a13da62ab11924e5a093a90"}
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.895323 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.988440 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s5tcx\" (UniqueName: \"kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx\") pod \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") "
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.988514 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys\") pod \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") "
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.988591 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data\") pod \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") "
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.988615 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle\") pod \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\" (UID: \"246879f2-e7ef-4506-ac99-7a5bf0bcbe62\") "
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.994721 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx" (OuterVolumeSpecName: "kube-api-access-s5tcx") pod "246879f2-e7ef-4506-ac99-7a5bf0bcbe62" (UID: "246879f2-e7ef-4506-ac99-7a5bf0bcbe62"). InnerVolumeSpecName "kube-api-access-s5tcx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:01:04 crc kubenswrapper[4868]: I1003 14:01:04.997211 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "246879f2-e7ef-4506-ac99-7a5bf0bcbe62" (UID: "246879f2-e7ef-4506-ac99-7a5bf0bcbe62"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.021630 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "246879f2-e7ef-4506-ac99-7a5bf0bcbe62" (UID: "246879f2-e7ef-4506-ac99-7a5bf0bcbe62"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.039300 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data" (OuterVolumeSpecName: "config-data") pod "246879f2-e7ef-4506-ac99-7a5bf0bcbe62" (UID: "246879f2-e7ef-4506-ac99-7a5bf0bcbe62"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.090562 4868 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-config-data\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.090595 4868 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.090610 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s5tcx\" (UniqueName: \"kubernetes.io/projected/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-kube-api-access-s5tcx\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.090640 4868 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/246879f2-e7ef-4506-ac99-7a5bf0bcbe62-fernet-keys\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.587334 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29325001-w7zlw" event={"ID":"246879f2-e7ef-4506-ac99-7a5bf0bcbe62","Type":"ContainerDied","Data":"a505eb847794aafd8c0c2d578c913d437508d42e6c42c92c1cf540890aefc62f"}
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.587382 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a505eb847794aafd8c0c2d578c913d437508d42e6c42c92c1cf540890aefc62f"
Oct 03 14:01:05 crc kubenswrapper[4868]: I1003 14:01:05.587408 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29325001-w7zlw"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.218730 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:28 crc kubenswrapper[4868]: E1003 14:01:28.222223 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="246879f2-e7ef-4506-ac99-7a5bf0bcbe62" containerName="keystone-cron"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.222354 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="246879f2-e7ef-4506-ac99-7a5bf0bcbe62" containerName="keystone-cron"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.222726 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="246879f2-e7ef-4506-ac99-7a5bf0bcbe62" containerName="keystone-cron"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.226802 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.231045 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.343134 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t2jxr\" (UniqueName: \"kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.343209 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.343249 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.445410 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t2jxr\" (UniqueName: \"kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.445535 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.445597 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.446091 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.446293 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.467177 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t2jxr\" (UniqueName: \"kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr\") pod \"redhat-operators-pp5rr\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") " pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:28 crc kubenswrapper[4868]: I1003 14:01:28.560583 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:29 crc kubenswrapper[4868]: I1003 14:01:29.011848 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:29 crc kubenswrapper[4868]: I1003 14:01:29.824721 4868 generic.go:334] "Generic (PLEG): container finished" podID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerID="800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b" exitCode=0
Oct 03 14:01:29 crc kubenswrapper[4868]: I1003 14:01:29.824794 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerDied","Data":"800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b"}
Oct 03 14:01:29 crc kubenswrapper[4868]: I1003 14:01:29.825227 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerStarted","Data":"7a2bd7938785e4a065ede18919476d0bbbe0ab8f163001b3fc2d8fa912e35caf"}
Oct 03 14:01:31 crc kubenswrapper[4868]: I1003 14:01:31.854328 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerStarted","Data":"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"}
Oct 03 14:01:33 crc kubenswrapper[4868]: I1003 14:01:33.877943 4868 generic.go:334] "Generic (PLEG): container finished" podID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerID="b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1" exitCode=0
Oct 03 14:01:33 crc kubenswrapper[4868]: I1003 14:01:33.878018 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerDied","Data":"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"}
Oct 03 14:01:34 crc kubenswrapper[4868]: I1003 14:01:34.891917 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerStarted","Data":"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"}
Oct 03 14:01:34 crc kubenswrapper[4868]: I1003 14:01:34.924445 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-pp5rr" podStartSLOduration=2.404781484 podStartE2EDuration="6.924425803s" podCreationTimestamp="2025-10-03 14:01:28 +0000 UTC" firstStartedPulling="2025-10-03 14:01:29.826460407 +0000 UTC m=+4286.036309473" lastFinishedPulling="2025-10-03 14:01:34.346104686 +0000 UTC m=+4290.555953792" observedRunningTime="2025-10-03 14:01:34.918602491 +0000 UTC m=+4291.128451557" watchObservedRunningTime="2025-10-03 14:01:34.924425803 +0000 UTC m=+4291.134274869"
Oct 03 14:01:38 crc kubenswrapper[4868]: I1003 14:01:38.561082 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:38 crc kubenswrapper[4868]: I1003 14:01:38.561756 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:39 crc kubenswrapper[4868]: I1003 14:01:39.619967 4868 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-pp5rr" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="registry-server" probeResult="failure" output=<
Oct 03 14:01:39 crc kubenswrapper[4868]: timeout: failed to connect service ":50051" within 1s
Oct 03 14:01:39 crc kubenswrapper[4868]: >
Oct 03 14:01:48 crc kubenswrapper[4868]: I1003 14:01:48.616793 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:48 crc kubenswrapper[4868]: I1003 14:01:48.668491 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:48 crc kubenswrapper[4868]: I1003 14:01:48.852625 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.029829 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-pp5rr" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="registry-server" containerID="cri-o://7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71" gracePeriod=2
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.476772 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.581983 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t2jxr\" (UniqueName: \"kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr\") pod \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") "
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.582591 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content\") pod \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") "
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.582764 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities\") pod \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\" (UID: \"6f5a1b6c-24e8-4028-9169-55eb17b5a04f\") "
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.583900 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities" (OuterVolumeSpecName: "utilities") pod "6f5a1b6c-24e8-4028-9169-55eb17b5a04f" (UID: "6f5a1b6c-24e8-4028-9169-55eb17b5a04f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.589422 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr" (OuterVolumeSpecName: "kube-api-access-t2jxr") pod "6f5a1b6c-24e8-4028-9169-55eb17b5a04f" (UID: "6f5a1b6c-24e8-4028-9169-55eb17b5a04f"). InnerVolumeSpecName "kube-api-access-t2jxr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.661286 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6f5a1b6c-24e8-4028-9169-55eb17b5a04f" (UID: "6f5a1b6c-24e8-4028-9169-55eb17b5a04f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.684930 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-utilities\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.684980 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t2jxr\" (UniqueName: \"kubernetes.io/projected/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-kube-api-access-t2jxr\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:50 crc kubenswrapper[4868]: I1003 14:01:50.685000 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6f5a1b6c-24e8-4028-9169-55eb17b5a04f-catalog-content\") on node \"crc\" DevicePath \"\""
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.039632 4868 generic.go:334] "Generic (PLEG): container finished" podID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerID="7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71" exitCode=0
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.039688 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerDied","Data":"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"}
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.039714 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-pp5rr"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.039735 4868 scope.go:117] "RemoveContainer" containerID="7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.039721 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-pp5rr" event={"ID":"6f5a1b6c-24e8-4028-9169-55eb17b5a04f","Type":"ContainerDied","Data":"7a2bd7938785e4a065ede18919476d0bbbe0ab8f163001b3fc2d8fa912e35caf"}
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.062641 4868 scope.go:117] "RemoveContainer" containerID="b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.079404 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.088571 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-pp5rr"]
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.108853 4868 scope.go:117] "RemoveContainer" containerID="800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.130469 4868 scope.go:117] "RemoveContainer" containerID="7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"
Oct 03 14:01:51 crc kubenswrapper[4868]: E1003 14:01:51.130891 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71\": container with ID starting with 7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71 not found: ID does not exist" containerID="7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.130916 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71"} err="failed to get container status \"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71\": rpc error: code = NotFound desc = could not find container \"7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71\": container with ID starting with 7d96e462e84236498f05601e2342b45f3c3e51f0ce7f39394d5c665bbbab0b71 not found: ID does not exist"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.130937 4868 scope.go:117] "RemoveContainer" containerID="b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"
Oct 03 14:01:51 crc kubenswrapper[4868]: E1003 14:01:51.131158 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1\": container with ID starting with b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1 not found: ID does not exist" containerID="b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.131176 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1"} err="failed to get container status \"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1\": rpc error: code = NotFound desc = could not find container \"b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1\": container with ID starting with b5921a89d3deb567e246f456960446c23b08b605d32442dac93e29b52b9feba1 not found: ID does not exist"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.131191 4868 scope.go:117] "RemoveContainer" containerID="800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b"
Oct 03 14:01:51 crc kubenswrapper[4868]: E1003 14:01:51.131399 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b\": container with ID starting with 800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b not found: ID does not exist" containerID="800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b"
Oct 03 14:01:51 crc kubenswrapper[4868]: I1003 14:01:51.131420 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b"} err="failed to get container status \"800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b\": rpc error: code = NotFound desc = could not find container \"800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b\": container with ID starting with 800c2743eea95c1a63dfa4a01ae195bd2d4f2fd4bdba7b2f867cea81d8bc885b not found: ID does not exist"
Oct 03 14:01:52 crc kubenswrapper[4868]: I1003 14:01:52.557997 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" path="/var/lib/kubelet/pods/6f5a1b6c-24e8-4028-9169-55eb17b5a04f/volumes"
Oct 03 14:02:02 crc kubenswrapper[4868]: I1003 14:02:02.145404 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:02:02 crc kubenswrapper[4868]: I1003 14:02:02.146275 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:02:32 crc kubenswrapper[4868]: I1003 14:02:32.145500 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:02:32 crc kubenswrapper[4868]: I1003 14:02:32.146264 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:02:38 crc kubenswrapper[4868]: I1003 14:02:38.706646 4868 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/swift-proxy-75c4dd668c-xqjsj" podUID="18a5e2cd-7517-4ef9-ab47-f4236b4bb836" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.145895 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.146579 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.146625 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.147315 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.147375 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" gracePeriod=600
Oct 03 14:03:02 crc kubenswrapper[4868]: E1003 14:03:02.282224 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.745663 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" exitCode=0
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.745711 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264"}
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.745747 4868 scope.go:117] "RemoveContainer" containerID="2bddec08a59b9a6fece045aa12076d03d639d7e198b6f07effe829bec69f2a1e"
Oct 03 14:03:02 crc kubenswrapper[4868]: I1003 14:03:02.746654 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264"
Oct 03 14:03:02 crc kubenswrapper[4868]: E1003 14:03:02.747116 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.783379 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:04 crc kubenswrapper[4868]: E1003 14:03:04.784510 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="extract-utilities" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.784525 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="extract-utilities" Oct 03 14:03:04 crc kubenswrapper[4868]: E1003 14:03:04.784560 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="extract-content" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.784568 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="extract-content" Oct 03 14:03:04 crc kubenswrapper[4868]: E1003 14:03:04.784580 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="registry-server" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.784586 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="registry-server" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.784756 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="6f5a1b6c-24e8-4028-9169-55eb17b5a04f" containerName="registry-server" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.786400 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.791927 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.904908 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.904989 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ww68n\" (UniqueName: \"kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:04 crc kubenswrapper[4868]: I1003 14:03:04.905020 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.007118 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ww68n\" (UniqueName: \"kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.007201 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.007411 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.007810 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.007858 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.028235 4868 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-ww68n\" (UniqueName: \"kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n\") pod \"community-operators-w9tg2\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.108311 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.595965 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:05 crc kubenswrapper[4868]: I1003 14:03:05.780016 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerStarted","Data":"331cfee1420eb322753b349442ebf4a6a90f9bb25e061bb74b2ecda82dbe67f7"} Oct 03 14:03:06 crc kubenswrapper[4868]: I1003 14:03:06.791077 4868 generic.go:334] "Generic (PLEG): container finished" podID="759410ab-7bc0-451d-b936-10d425df4ba3" containerID="fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164" exitCode=0 Oct 03 14:03:06 crc kubenswrapper[4868]: I1003 14:03:06.791126 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerDied","Data":"fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164"} Oct 03 14:03:07 crc kubenswrapper[4868]: I1003 14:03:07.804027 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerStarted","Data":"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3"} Oct 03 14:03:08 crc kubenswrapper[4868]: I1003 14:03:08.818318 4868 generic.go:334] "Generic (PLEG): container finished" podID="759410ab-7bc0-451d-b936-10d425df4ba3" containerID="9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3" exitCode=0 Oct 03 14:03:08 crc kubenswrapper[4868]: I1003 14:03:08.818392 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerDied","Data":"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3"} Oct 03 14:03:09 crc kubenswrapper[4868]: I1003 14:03:09.835249 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerStarted","Data":"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae"} Oct 03 14:03:09 crc kubenswrapper[4868]: I1003 14:03:09.855955 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w9tg2" podStartSLOduration=3.425921368 podStartE2EDuration="5.855929269s" podCreationTimestamp="2025-10-03 14:03:04 +0000 UTC" firstStartedPulling="2025-10-03 14:03:06.794020036 +0000 UTC m=+4383.003869092" lastFinishedPulling="2025-10-03 14:03:09.224027927 +0000 UTC m=+4385.433876993" observedRunningTime="2025-10-03 14:03:09.851960206 +0000 UTC m=+4386.061809272" watchObservedRunningTime="2025-10-03 14:03:09.855929269 +0000 UTC m=+4386.065778345" Oct 03 14:03:15 crc kubenswrapper[4868]: I1003 14:03:15.108747 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:15 crc kubenswrapper[4868]: I1003 14:03:15.109446 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:15 crc kubenswrapper[4868]: I1003 14:03:15.149189 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:15 crc kubenswrapper[4868]: I1003 14:03:15.945741 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:16 crc kubenswrapper[4868]: I1003 14:03:16.018443 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:16 crc kubenswrapper[4868]: I1003 14:03:16.543994 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:03:16 crc kubenswrapper[4868]: E1003 14:03:16.544398 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:03:17 crc kubenswrapper[4868]: I1003 14:03:17.915899 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w9tg2" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="registry-server" containerID="cri-o://fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae" gracePeriod=2 Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.408289 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.485683 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ww68n\" (UniqueName: \"kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n\") pod \"759410ab-7bc0-451d-b936-10d425df4ba3\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.485862 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content\") pod \"759410ab-7bc0-451d-b936-10d425df4ba3\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.485979 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities\") pod \"759410ab-7bc0-451d-b936-10d425df4ba3\" (UID: \"759410ab-7bc0-451d-b936-10d425df4ba3\") " Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.488265 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities" (OuterVolumeSpecName: "utilities") pod "759410ab-7bc0-451d-b936-10d425df4ba3" (UID: "759410ab-7bc0-451d-b936-10d425df4ba3"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.494612 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n" (OuterVolumeSpecName: "kube-api-access-ww68n") pod "759410ab-7bc0-451d-b936-10d425df4ba3" (UID: "759410ab-7bc0-451d-b936-10d425df4ba3"). InnerVolumeSpecName "kube-api-access-ww68n". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.588345 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ww68n\" (UniqueName: \"kubernetes.io/projected/759410ab-7bc0-451d-b936-10d425df4ba3-kube-api-access-ww68n\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.588388 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.926433 4868 generic.go:334] "Generic (PLEG): container finished" podID="759410ab-7bc0-451d-b936-10d425df4ba3" containerID="fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae" exitCode=0 Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.926487 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w9tg2" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.926499 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerDied","Data":"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae"} Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.926553 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w9tg2" event={"ID":"759410ab-7bc0-451d-b936-10d425df4ba3","Type":"ContainerDied","Data":"331cfee1420eb322753b349442ebf4a6a90f9bb25e061bb74b2ecda82dbe67f7"} Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.926574 4868 scope.go:117] "RemoveContainer" containerID="fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.960044 4868 scope.go:117] "RemoveContainer" containerID="9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3" Oct 03 14:03:18 crc kubenswrapper[4868]: I1003 14:03:18.984954 4868 scope.go:117] "RemoveContainer" containerID="fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.041080 4868 scope.go:117] "RemoveContainer" containerID="fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae" Oct 03 14:03:19 crc kubenswrapper[4868]: E1003 14:03:19.041871 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae\": container with ID starting with fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae not found: ID does not exist" containerID="fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.041925 4868 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae"} err="failed to get container status \"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae\": rpc error: code = NotFound desc = could not find container \"fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae\": container with ID starting with fbbe09dbaa1f0e3eda6438134bd4e51eb40def775d16a792fcba36dc46232eae not found: ID does not exist" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.041947 4868 scope.go:117] "RemoveContainer" containerID="9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3" Oct 03 14:03:19 crc kubenswrapper[4868]: E1003 14:03:19.042396 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3\": container with ID starting with 9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3 not found: ID does not exist" containerID="9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.042431 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3"} err="failed to get container status \"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3\": rpc error: code = NotFound desc = could not find container \"9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3\": container with ID starting with 9ae752d7b02bf3295f5d669ececd9f8e0270aee9f8211fe9bdd3b8cb22dd51e3 not found: ID does not exist" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.042453 4868 scope.go:117] "RemoveContainer" containerID="fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164" Oct 03 14:03:19 crc kubenswrapper[4868]: E1003 14:03:19.042789 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164\": container with ID starting with fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164 not found: ID does not exist" containerID="fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.042810 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164"} err="failed to get container status \"fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164\": rpc error: code = NotFound desc = could not find container \"fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164\": container with ID starting with fd61dea2f8357104ba0e97d4db7e1ffd0fda233dfbc877d37a70d8bbaaa74164 not found: ID does not exist" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.102722 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "759410ab-7bc0-451d-b936-10d425df4ba3" (UID: "759410ab-7bc0-451d-b936-10d425df4ba3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.200535 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/759410ab-7bc0-451d-b936-10d425df4ba3-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.267078 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:19 crc kubenswrapper[4868]: I1003 14:03:19.283105 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w9tg2"] Oct 03 14:03:20 crc kubenswrapper[4868]: I1003 14:03:20.555899 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" path="/var/lib/kubelet/pods/759410ab-7bc0-451d-b936-10d425df4ba3/volumes" Oct 03 14:03:29 crc kubenswrapper[4868]: I1003 14:03:29.544397 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:03:29 crc kubenswrapper[4868]: E1003 14:03:29.545648 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:03:43 crc kubenswrapper[4868]: I1003 14:03:43.544438 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:03:43 crc kubenswrapper[4868]: E1003 14:03:43.545338 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:03:58 crc kubenswrapper[4868]: I1003 14:03:58.544507 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:03:58 crc kubenswrapper[4868]: E1003 14:03:58.545321 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:04:10 crc kubenswrapper[4868]: I1003 14:04:10.544426 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:04:10 crc kubenswrapper[4868]: E1003 14:04:10.545272 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:04:24 crc kubenswrapper[4868]: I1003 14:04:24.557103 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:04:24 crc kubenswrapper[4868]: E1003 14:04:24.558870 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.015312 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:34 crc kubenswrapper[4868]: E1003 14:04:34.016672 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="extract-content" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.016692 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="extract-content" Oct 03 14:04:34 crc kubenswrapper[4868]: E1003 14:04:34.016711 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="extract-utilities" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.016720 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="extract-utilities" Oct 03 14:04:34 crc kubenswrapper[4868]: E1003 14:04:34.016732 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="registry-server" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.016739 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="registry-server" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.016950 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="759410ab-7bc0-451d-b936-10d425df4ba3" containerName="registry-server" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.018638 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.035653 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.177488 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.178628 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jg6cf\" (UniqueName: \"kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.178939 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.280995 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jg6cf\" (UniqueName: \"kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.281119 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.281167 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.281724 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.281759 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.306426 4868 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jg6cf\" (UniqueName: \"kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf\") pod \"redhat-marketplace-tb24k\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.376655 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:34 crc kubenswrapper[4868]: I1003 14:04:34.807887 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:35 crc kubenswrapper[4868]: I1003 14:04:35.641500 4868 generic.go:334] "Generic (PLEG): container finished" podID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerID="903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d" exitCode=0 Oct 03 14:04:35 crc kubenswrapper[4868]: I1003 14:04:35.641578 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerDied","Data":"903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d"} Oct 03 14:04:35 crc kubenswrapper[4868]: I1003 14:04:35.644258 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerStarted","Data":"d5a37424fe94ab500fef3ce58028d9b431d8eb95f80f35c563ad3872086a1232"} Oct 03 14:04:35 crc kubenswrapper[4868]: I1003 14:04:35.643543 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:04:37 crc kubenswrapper[4868]: I1003 14:04:37.545307 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:04:37 crc kubenswrapper[4868]: E1003 14:04:37.546285 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:04:37 crc kubenswrapper[4868]: I1003 14:04:37.661841 4868 generic.go:334] "Generic (PLEG): container finished" podID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerID="23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741" exitCode=0 Oct 03 14:04:37 crc kubenswrapper[4868]: I1003 14:04:37.661877 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerDied","Data":"23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741"} Oct 03 14:04:39 crc kubenswrapper[4868]: I1003 14:04:39.689152 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerStarted","Data":"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57"} Oct 03 14:04:39 crc kubenswrapper[4868]: I1003 14:04:39.708979 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-tb24k" podStartSLOduration=3.862672738 podStartE2EDuration="6.708961304s" 
podCreationTimestamp="2025-10-03 14:04:33 +0000 UTC" firstStartedPulling="2025-10-03 14:04:35.643122973 +0000 UTC m=+4471.852972039" lastFinishedPulling="2025-10-03 14:04:38.489411539 +0000 UTC m=+4474.699260605" observedRunningTime="2025-10-03 14:04:39.706399016 +0000 UTC m=+4475.916248102" watchObservedRunningTime="2025-10-03 14:04:39.708961304 +0000 UTC m=+4475.918810370" Oct 03 14:04:44 crc kubenswrapper[4868]: I1003 14:04:44.377729 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:44 crc kubenswrapper[4868]: I1003 14:04:44.378277 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:44 crc kubenswrapper[4868]: I1003 14:04:44.427228 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:44 crc kubenswrapper[4868]: I1003 14:04:44.773596 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:44 crc kubenswrapper[4868]: I1003 14:04:44.823859 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:46 crc kubenswrapper[4868]: I1003 14:04:46.746647 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-tb24k" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="registry-server" containerID="cri-o://57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57" gracePeriod=2 Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.566144 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.720256 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content\") pod \"5f6a791d-1ead-4d61-9312-9239fee09c18\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.720517 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jg6cf\" (UniqueName: \"kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf\") pod \"5f6a791d-1ead-4d61-9312-9239fee09c18\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.721526 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities\") pod \"5f6a791d-1ead-4d61-9312-9239fee09c18\" (UID: \"5f6a791d-1ead-4d61-9312-9239fee09c18\") " Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.722452 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities" (OuterVolumeSpecName: "utilities") pod "5f6a791d-1ead-4d61-9312-9239fee09c18" (UID: "5f6a791d-1ead-4d61-9312-9239fee09c18"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.723943 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.733873 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf" (OuterVolumeSpecName: "kube-api-access-jg6cf") pod "5f6a791d-1ead-4d61-9312-9239fee09c18" (UID: "5f6a791d-1ead-4d61-9312-9239fee09c18"). InnerVolumeSpecName "kube-api-access-jg6cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.742558 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5f6a791d-1ead-4d61-9312-9239fee09c18" (UID: "5f6a791d-1ead-4d61-9312-9239fee09c18"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.763874 4868 generic.go:334] "Generic (PLEG): container finished" podID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerID="57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57" exitCode=0 Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.763927 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerDied","Data":"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57"} Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.763959 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-tb24k" event={"ID":"5f6a791d-1ead-4d61-9312-9239fee09c18","Type":"ContainerDied","Data":"d5a37424fe94ab500fef3ce58028d9b431d8eb95f80f35c563ad3872086a1232"} Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.763981 4868 scope.go:117] "RemoveContainer" containerID="57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.765268 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-tb24k" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.805484 4868 scope.go:117] "RemoveContainer" containerID="23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.808602 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.820240 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-tb24k"] Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.826244 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5f6a791d-1ead-4d61-9312-9239fee09c18-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.826289 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jg6cf\" (UniqueName: \"kubernetes.io/projected/5f6a791d-1ead-4d61-9312-9239fee09c18-kube-api-access-jg6cf\") on node \"crc\" DevicePath \"\"" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.830386 4868 scope.go:117] "RemoveContainer" containerID="903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.873027 4868 scope.go:117] "RemoveContainer" containerID="57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57" Oct 03 14:04:47 crc kubenswrapper[4868]: E1003 14:04:47.874464 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57\": container with ID starting with 57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57 not found: ID does not exist" containerID="57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.874589 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57"} err="failed to get container status \"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57\": rpc error: code = NotFound desc = could not find container \"57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57\": container with ID starting with 57369663c9250a24758460965aaeda3b48d4c9728af147f1ade3856cbf30af57 not found: ID does not exist" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.874760 4868 scope.go:117] "RemoveContainer" containerID="23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741" Oct 03 14:04:47 crc kubenswrapper[4868]: E1003 14:04:47.875184 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741\": container with ID starting with 23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741 not found: ID does not exist" containerID="23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.875318 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741"} err="failed to get container status \"23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741\": rpc 
error: code = NotFound desc = could not find container \"23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741\": container with ID starting with 23cdb446a8087f87afeead86ca16c412f14dbc5dcac0eb4749704bef41606741 not found: ID does not exist" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.875414 4868 scope.go:117] "RemoveContainer" containerID="903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d" Oct 03 14:04:47 crc kubenswrapper[4868]: E1003 14:04:47.875706 4868 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d\": container with ID starting with 903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d not found: ID does not exist" containerID="903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d" Oct 03 14:04:47 crc kubenswrapper[4868]: I1003 14:04:47.875811 4868 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d"} err="failed to get container status \"903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d\": rpc error: code = NotFound desc = could not find container \"903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d\": container with ID starting with 903823eb5f79e7599785124c6b714e1b07ba496893425a8dbe5ba502d7ba025d not found: ID does not exist" Oct 03 14:04:48 crc kubenswrapper[4868]: I1003 14:04:48.560708 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" path="/var/lib/kubelet/pods/5f6a791d-1ead-4d61-9312-9239fee09c18/volumes" Oct 03 14:04:51 crc kubenswrapper[4868]: I1003 14:04:51.544294 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:04:51 crc kubenswrapper[4868]: E1003 14:04:51.545201 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:05:06 crc kubenswrapper[4868]: I1003 14:05:06.546830 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:05:06 crc kubenswrapper[4868]: E1003 14:05:06.547745 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:05:21 crc kubenswrapper[4868]: I1003 14:05:21.545719 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:05:21 crc kubenswrapper[4868]: E1003 14:05:21.546928 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:05:34 crc kubenswrapper[4868]: I1003 14:05:34.552226 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:05:34 crc kubenswrapper[4868]: E1003 14:05:34.554637 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:05:48 crc kubenswrapper[4868]: I1003 14:05:48.544358 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:05:48 crc kubenswrapper[4868]: E1003 14:05:48.546278 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:06:02 crc kubenswrapper[4868]: I1003 14:06:02.544547 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:06:02 crc kubenswrapper[4868]: E1003 14:06:02.546007 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:06:16 crc kubenswrapper[4868]: I1003 14:06:16.543581 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:06:16 crc kubenswrapper[4868]: E1003 14:06:16.545775 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:06:27 crc kubenswrapper[4868]: I1003 14:06:27.545600 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:06:27 crc kubenswrapper[4868]: E1003 14:06:27.546607 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" 
podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:06:39 crc kubenswrapper[4868]: I1003 14:06:39.544182 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:06:39 crc kubenswrapper[4868]: E1003 14:06:39.544984 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:06:53 crc kubenswrapper[4868]: I1003 14:06:53.545121 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:06:53 crc kubenswrapper[4868]: E1003 14:06:53.546216 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:07:07 crc kubenswrapper[4868]: I1003 14:07:07.544032 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:07:07 crc kubenswrapper[4868]: E1003 14:07:07.544934 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:07:22 crc kubenswrapper[4868]: I1003 14:07:22.546447 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:07:22 crc kubenswrapper[4868]: E1003 14:07:22.547669 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:07:36 crc kubenswrapper[4868]: I1003 14:07:36.544270 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:07:36 crc kubenswrapper[4868]: E1003 14:07:36.545083 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:07:38 crc kubenswrapper[4868]: I1003 14:07:38.706819 4868 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openstack/swift-proxy-75c4dd668c-xqjsj" podUID="18a5e2cd-7517-4ef9-ab47-f4236b4bb836" containerName="proxy-httpd" probeResult="failure" output="HTTP probe failed with statuscode: 502" Oct 03 14:07:47 crc kubenswrapper[4868]: I1003 14:07:47.544360 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:07:47 crc kubenswrapper[4868]: E1003 14:07:47.545265 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:07:59 crc kubenswrapper[4868]: I1003 14:07:59.544475 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:07:59 crc kubenswrapper[4868]: E1003 14:07:59.546143 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:08:13 crc kubenswrapper[4868]: I1003 14:08:13.543819 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:08:14 crc kubenswrapper[4868]: I1003 14:08:14.664863 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5"} Oct 03 14:10:32 crc kubenswrapper[4868]: I1003 14:10:32.145324 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:10:32 crc kubenswrapper[4868]: I1003 14:10:32.145869 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:11:02 crc kubenswrapper[4868]: I1003 14:11:02.145604 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:11:02 crc kubenswrapper[4868]: I1003 14:11:02.146176 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 
14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.145257 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.145834 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.145880 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.146722 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.146785 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5" gracePeriod=600 Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.421770 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5" exitCode=0 Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.421834 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5"} Oct 03 14:11:32 crc kubenswrapper[4868]: I1003 14:11:32.421880 4868 scope.go:117] "RemoveContainer" containerID="812ac456eb18a1556c0669d94cea30dd15bf5d12157fe47313861c646e682264" Oct 03 14:11:33 crc kubenswrapper[4868]: I1003 14:11:33.434911 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"} Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.503896 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:27 crc kubenswrapper[4868]: E1003 14:12:27.504932 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="registry-server" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.504973 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="registry-server" Oct 03 14:12:27 crc kubenswrapper[4868]: E1003 14:12:27.504991 4868 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="extract-utilities" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.505000 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="extract-utilities" Oct 03 14:12:27 crc kubenswrapper[4868]: E1003 14:12:27.505019 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="extract-content" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.505027 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="extract-content" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.505293 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f6a791d-1ead-4d61-9312-9239fee09c18" containerName="registry-server" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.515539 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.523104 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.689232 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6llqz\" (UniqueName: \"kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.689295 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.689396 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.791661 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6llqz\" (UniqueName: \"kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.791727 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.791790 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities\") pod 
\"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.792279 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.792371 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.810316 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6llqz\" (UniqueName: \"kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz\") pod \"redhat-operators-kwsr7\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:27 crc kubenswrapper[4868]: I1003 14:12:27.838474 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:28 crc kubenswrapper[4868]: I1003 14:12:28.324726 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:28 crc kubenswrapper[4868]: I1003 14:12:28.956865 4868 generic.go:334] "Generic (PLEG): container finished" podID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerID="abd17ba1baa3edcb666dee3849518f00bef75a5c9914183e0ce353e2cf2b5136" exitCode=0 Oct 03 14:12:28 crc kubenswrapper[4868]: I1003 14:12:28.956909 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerDied","Data":"abd17ba1baa3edcb666dee3849518f00bef75a5c9914183e0ce353e2cf2b5136"} Oct 03 14:12:28 crc kubenswrapper[4868]: I1003 14:12:28.957362 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerStarted","Data":"02a219a0c3e59fa9b639ee69e9ba5cb231799db0e5c9c79d3c6c897f56c79844"} Oct 03 14:12:28 crc kubenswrapper[4868]: I1003 14:12:28.958922 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:12:30 crc kubenswrapper[4868]: I1003 14:12:30.975736 4868 generic.go:334] "Generic (PLEG): container finished" podID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerID="093577ec8f332d839d9fe8f1028e7f732cebacad3058fbcf8cba069281be8e12" exitCode=0 Oct 03 14:12:30 crc kubenswrapper[4868]: I1003 14:12:30.975824 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerDied","Data":"093577ec8f332d839d9fe8f1028e7f732cebacad3058fbcf8cba069281be8e12"} Oct 03 14:12:33 crc kubenswrapper[4868]: I1003 14:12:33.004233 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" 
event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerStarted","Data":"ce88c411b4e3f6599794bdfc5593d21b7b22d5054486ca4e093f0f703a525ded"} Oct 03 14:12:33 crc kubenswrapper[4868]: I1003 14:12:33.024666 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kwsr7" podStartSLOduration=3.120224863 podStartE2EDuration="6.024649304s" podCreationTimestamp="2025-10-03 14:12:27 +0000 UTC" firstStartedPulling="2025-10-03 14:12:28.958650859 +0000 UTC m=+4945.168499925" lastFinishedPulling="2025-10-03 14:12:31.8630753 +0000 UTC m=+4948.072924366" observedRunningTime="2025-10-03 14:12:33.022577469 +0000 UTC m=+4949.232426555" watchObservedRunningTime="2025-10-03 14:12:33.024649304 +0000 UTC m=+4949.234498370" Oct 03 14:12:37 crc kubenswrapper[4868]: I1003 14:12:37.839201 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:37 crc kubenswrapper[4868]: I1003 14:12:37.839764 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:37 crc kubenswrapper[4868]: I1003 14:12:37.886435 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:38 crc kubenswrapper[4868]: I1003 14:12:38.105756 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:38 crc kubenswrapper[4868]: I1003 14:12:38.166532 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:40 crc kubenswrapper[4868]: I1003 14:12:40.080206 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kwsr7" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="registry-server" containerID="cri-o://ce88c411b4e3f6599794bdfc5593d21b7b22d5054486ca4e093f0f703a525ded" gracePeriod=2 Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.090949 4868 generic.go:334] "Generic (PLEG): container finished" podID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerID="ce88c411b4e3f6599794bdfc5593d21b7b22d5054486ca4e093f0f703a525ded" exitCode=0 Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.091288 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerDied","Data":"ce88c411b4e3f6599794bdfc5593d21b7b22d5054486ca4e093f0f703a525ded"} Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.297268 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.464804 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content\") pod \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.464947 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities\") pod \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.465116 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6llqz\" (UniqueName: \"kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz\") pod \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\" (UID: \"8aeb95de-ea9a-4433-87b9-742de0ee74ad\") " Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.469280 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities" (OuterVolumeSpecName: "utilities") pod "8aeb95de-ea9a-4433-87b9-742de0ee74ad" (UID: "8aeb95de-ea9a-4433-87b9-742de0ee74ad"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.474558 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz" (OuterVolumeSpecName: "kube-api-access-6llqz") pod "8aeb95de-ea9a-4433-87b9-742de0ee74ad" (UID: "8aeb95de-ea9a-4433-87b9-742de0ee74ad"). InnerVolumeSpecName "kube-api-access-6llqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.568409 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6llqz\" (UniqueName: \"kubernetes.io/projected/8aeb95de-ea9a-4433-87b9-742de0ee74ad-kube-api-access-6llqz\") on node \"crc\" DevicePath \"\"" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.568477 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.583738 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8aeb95de-ea9a-4433-87b9-742de0ee74ad" (UID: "8aeb95de-ea9a-4433-87b9-742de0ee74ad"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:12:41 crc kubenswrapper[4868]: I1003 14:12:41.670980 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8aeb95de-ea9a-4433-87b9-742de0ee74ad-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.102094 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kwsr7" event={"ID":"8aeb95de-ea9a-4433-87b9-742de0ee74ad","Type":"ContainerDied","Data":"02a219a0c3e59fa9b639ee69e9ba5cb231799db0e5c9c79d3c6c897f56c79844"} Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.102170 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kwsr7" Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.102405 4868 scope.go:117] "RemoveContainer" containerID="ce88c411b4e3f6599794bdfc5593d21b7b22d5054486ca4e093f0f703a525ded" Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.126401 4868 scope.go:117] "RemoveContainer" containerID="093577ec8f332d839d9fe8f1028e7f732cebacad3058fbcf8cba069281be8e12" Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.131742 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.139375 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kwsr7"] Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.177204 4868 scope.go:117] "RemoveContainer" containerID="abd17ba1baa3edcb666dee3849518f00bef75a5c9914183e0ce353e2cf2b5136" Oct 03 14:12:42 crc kubenswrapper[4868]: I1003 14:12:42.566336 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" path="/var/lib/kubelet/pods/8aeb95de-ea9a-4433-87b9-742de0ee74ad/volumes" Oct 03 14:13:32 crc kubenswrapper[4868]: I1003 14:13:32.145398 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:13:32 crc kubenswrapper[4868]: I1003 14:13:32.146080 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:14:02 crc kubenswrapper[4868]: I1003 14:14:02.145582 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:14:02 crc kubenswrapper[4868]: I1003 14:14:02.152121 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:14:32 crc kubenswrapper[4868]: I1003 14:14:32.145712 4868 patch_prober.go:28] 
interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:14:32 crc kubenswrapper[4868]: I1003 14:14:32.146332 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:14:32 crc kubenswrapper[4868]: I1003 14:14:32.146394 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 14:14:32 crc kubenswrapper[4868]: I1003 14:14:32.147338 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Oct 03 14:14:32 crc kubenswrapper[4868]: I1003 14:14:32.147412 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" gracePeriod=600 Oct 03 14:14:32 crc kubenswrapper[4868]: E1003 14:14:32.276146 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:14:33 crc kubenswrapper[4868]: I1003 14:14:33.144691 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" exitCode=0 Oct 03 14:14:33 crc kubenswrapper[4868]: I1003 14:14:33.144754 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"} Oct 03 14:14:33 crc kubenswrapper[4868]: I1003 14:14:33.145025 4868 scope.go:117] "RemoveContainer" containerID="50ab464e21a460d675007abe7020159217d6b2fd8f4b92a001c35cbdf0895fd5" Oct 03 14:14:33 crc kubenswrapper[4868]: I1003 14:14:33.146154 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:14:33 crc kubenswrapper[4868]: E1003 14:14:33.146572 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:14:46 crc kubenswrapper[4868]: I1003 14:14:46.545168 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:14:46 crc kubenswrapper[4868]: E1003 14:14:46.545892 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.142270 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv"] Oct 03 14:15:00 crc kubenswrapper[4868]: E1003 14:15:00.143197 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="extract-content" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.143212 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="extract-content" Oct 03 14:15:00 crc kubenswrapper[4868]: E1003 14:15:00.143234 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="registry-server" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.143239 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="registry-server" Oct 03 14:15:00 crc kubenswrapper[4868]: E1003 14:15:00.143263 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="extract-utilities" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.143269 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="extract-utilities" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.143466 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="8aeb95de-ea9a-4433-87b9-742de0ee74ad" containerName="registry-server" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.144139 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.148533 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.148945 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.168335 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv"] Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.242423 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.242469 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpphq\" (UniqueName: \"kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.242577 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.344141 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.344281 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.344306 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpphq\" (UniqueName: \"kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.345411 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume\") pod 
\"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.350486 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.360933 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpphq\" (UniqueName: \"kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq\") pod \"collect-profiles-29325015-bfglv\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.472454 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.544301 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:15:00 crc kubenswrapper[4868]: E1003 14:15:00.544600 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:15:00 crc kubenswrapper[4868]: I1003 14:15:00.927075 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv"] Oct 03 14:15:01 crc kubenswrapper[4868]: I1003 14:15:01.408127 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" event={"ID":"eec24e75-f339-4c23-bdf8-9ae603775cff","Type":"ContainerStarted","Data":"926f639719a48a641a88c5643b9ecc777ee46036aa59d973dda75f2b6ac7fc8f"} Oct 03 14:15:01 crc kubenswrapper[4868]: I1003 14:15:01.408456 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" event={"ID":"eec24e75-f339-4c23-bdf8-9ae603775cff","Type":"ContainerStarted","Data":"b2bb06dc338d0fcaa1c472fa4255ce494ff50ba89d915946805cf36ead5d5d01"} Oct 03 14:15:01 crc kubenswrapper[4868]: I1003 14:15:01.429154 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" podStartSLOduration=1.4291316969999999 podStartE2EDuration="1.429131697s" podCreationTimestamp="2025-10-03 14:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:15:01.421844084 +0000 UTC m=+5097.631693170" watchObservedRunningTime="2025-10-03 14:15:01.429131697 +0000 UTC m=+5097.638980763" Oct 03 14:15:02 crc kubenswrapper[4868]: I1003 14:15:02.420595 4868 generic.go:334] "Generic (PLEG): container finished" podID="eec24e75-f339-4c23-bdf8-9ae603775cff" 
containerID="926f639719a48a641a88c5643b9ecc777ee46036aa59d973dda75f2b6ac7fc8f" exitCode=0 Oct 03 14:15:02 crc kubenswrapper[4868]: I1003 14:15:02.420690 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" event={"ID":"eec24e75-f339-4c23-bdf8-9ae603775cff","Type":"ContainerDied","Data":"926f639719a48a641a88c5643b9ecc777ee46036aa59d973dda75f2b6ac7fc8f"} Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.768045 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.910803 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume\") pod \"eec24e75-f339-4c23-bdf8-9ae603775cff\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.910924 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume\") pod \"eec24e75-f339-4c23-bdf8-9ae603775cff\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.910995 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kpphq\" (UniqueName: \"kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq\") pod \"eec24e75-f339-4c23-bdf8-9ae603775cff\" (UID: \"eec24e75-f339-4c23-bdf8-9ae603775cff\") " Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.911833 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume" (OuterVolumeSpecName: "config-volume") pod "eec24e75-f339-4c23-bdf8-9ae603775cff" (UID: "eec24e75-f339-4c23-bdf8-9ae603775cff"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.918980 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eec24e75-f339-4c23-bdf8-9ae603775cff" (UID: "eec24e75-f339-4c23-bdf8-9ae603775cff"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Oct 03 14:15:03 crc kubenswrapper[4868]: I1003 14:15:03.919241 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq" (OuterVolumeSpecName: "kube-api-access-kpphq") pod "eec24e75-f339-4c23-bdf8-9ae603775cff" (UID: "eec24e75-f339-4c23-bdf8-9ae603775cff"). InnerVolumeSpecName "kube-api-access-kpphq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.012899 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kpphq\" (UniqueName: \"kubernetes.io/projected/eec24e75-f339-4c23-bdf8-9ae603775cff-kube-api-access-kpphq\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.012944 4868 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eec24e75-f339-4c23-bdf8-9ae603775cff-secret-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.012954 4868 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eec24e75-f339-4c23-bdf8-9ae603775cff-config-volume\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.441005 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" event={"ID":"eec24e75-f339-4c23-bdf8-9ae603775cff","Type":"ContainerDied","Data":"b2bb06dc338d0fcaa1c472fa4255ce494ff50ba89d915946805cf36ead5d5d01"} Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.441355 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b2bb06dc338d0fcaa1c472fa4255ce494ff50ba89d915946805cf36ead5d5d01" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.441097 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29325015-bfglv" Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.484793 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"] Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.492783 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29324970-t8shx"] Oct 03 14:15:04 crc kubenswrapper[4868]: I1003 14:15:04.554427 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7f7e978-056c-48f1-bc05-c0368695ee2b" path="/var/lib/kubelet/pods/a7f7e978-056c-48f1-bc05-c0368695ee2b/volumes" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.004933 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:14 crc kubenswrapper[4868]: E1003 14:15:14.006305 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eec24e75-f339-4c23-bdf8-9ae603775cff" containerName="collect-profiles" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.006321 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="eec24e75-f339-4c23-bdf8-9ae603775cff" containerName="collect-profiles" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.006540 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="eec24e75-f339-4c23-bdf8-9ae603775cff" containerName="collect-profiles" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.008288 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.021862 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.121944 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.122318 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nw894\" (UniqueName: \"kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.122372 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.203286 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"] Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.205174 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.222456 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"] Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.223544 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.223593 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nw894\" (UniqueName: \"kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.223633 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.224145 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.224215 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.247171 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nw894\" (UniqueName: \"kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894\") pod \"community-operators-rg79p\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.325756 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.325940 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.325970 4868 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6chz\" (UniqueName: \"kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.342808 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.427633 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.427689 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6chz\" (UniqueName: \"kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.427793 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.428652 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.428880 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.445933 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r6chz\" (UniqueName: \"kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz\") pod \"certified-operators-mvmvh\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.607998 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:14 crc kubenswrapper[4868]: I1003 14:15:14.910028 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:15 crc kubenswrapper[4868]: W1003 14:15:15.089228 4868 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18a9dbcc_01a0_40bd_be89_b3333449d24c.slice/crio-2ef447d15f427816261c805711e49cdada9bfb0d1e5b1901b54938ef805aa98c WatchSource:0}: Error finding container 2ef447d15f427816261c805711e49cdada9bfb0d1e5b1901b54938ef805aa98c: Status 404 returned error can't find the container with id 2ef447d15f427816261c805711e49cdada9bfb0d1e5b1901b54938ef805aa98c Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.089906 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"] Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.543910 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:15:15 crc kubenswrapper[4868]: E1003 14:15:15.544479 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.547670 4868 generic.go:334] "Generic (PLEG): container finished" podID="d8006204-7b08-4490-97ee-2d2077eaca96" containerID="ad59e2563dfcc6d98ee92fbe675fe2dd39ff5ebb67fad5304d48879d59c5c209" exitCode=0 Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.547744 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerDied","Data":"ad59e2563dfcc6d98ee92fbe675fe2dd39ff5ebb67fad5304d48879d59c5c209"} Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.547765 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerStarted","Data":"fd895e2af94aac5146e91fe9559aadabc6e580446240c923fe64a72b882206a6"} Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.551671 4868 generic.go:334] "Generic (PLEG): container finished" podID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerID="191bb618f518f25de657aae55f57b91b79907050a1cefc3db3b48e51b9a204c9" exitCode=0 Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.551708 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerDied","Data":"191bb618f518f25de657aae55f57b91b79907050a1cefc3db3b48e51b9a204c9"} Oct 03 14:15:15 crc kubenswrapper[4868]: I1003 14:15:15.551731 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerStarted","Data":"2ef447d15f427816261c805711e49cdada9bfb0d1e5b1901b54938ef805aa98c"} Oct 03 14:15:17 crc kubenswrapper[4868]: I1003 14:15:17.242806 4868 scope.go:117] "RemoveContainer" 
containerID="50d1e5946fffb816e5f8cb4eeee9e7ff2cf7d5713c064c969fcfc24a931a3f0d" Oct 03 14:15:17 crc kubenswrapper[4868]: I1003 14:15:17.577829 4868 generic.go:334] "Generic (PLEG): container finished" podID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerID="4cf11231a85c0dc5e703aa0aab5d0bc47e55f94f5171c8fecb78dc116ff9a889" exitCode=0 Oct 03 14:15:17 crc kubenswrapper[4868]: I1003 14:15:17.578443 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerDied","Data":"4cf11231a85c0dc5e703aa0aab5d0bc47e55f94f5171c8fecb78dc116ff9a889"} Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.601461 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerStarted","Data":"a573a12e11ceb40b80fa5fecae05f2b547a0c196da7686ede34820b44c34731a"} Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.801574 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.805611 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.822022 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.934728 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.934826 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h25hf\" (UniqueName: \"kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:18 crc kubenswrapper[4868]: I1003 14:15:18.934920 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.036475 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.036572 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc 
kubenswrapper[4868]: I1003 14:15:19.036618 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h25hf\" (UniqueName: \"kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.037685 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.037913 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.073414 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h25hf\" (UniqueName: \"kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf\") pod \"redhat-marketplace-rjgnn\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.158709 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.618166 4868 generic.go:334] "Generic (PLEG): container finished" podID="d8006204-7b08-4490-97ee-2d2077eaca96" containerID="a573a12e11ceb40b80fa5fecae05f2b547a0c196da7686ede34820b44c34731a" exitCode=0 Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.618388 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerDied","Data":"a573a12e11ceb40b80fa5fecae05f2b547a0c196da7686ede34820b44c34731a"} Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.629272 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerStarted","Data":"596436e743d58d1df24d083e398497eeb5f701283737d8cb009b01832a14a16c"} Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.691878 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mvmvh" podStartSLOduration=3.17029519 podStartE2EDuration="5.691851955s" podCreationTimestamp="2025-10-03 14:15:14 +0000 UTC" firstStartedPulling="2025-10-03 14:15:15.554318742 +0000 UTC m=+5111.764167808" lastFinishedPulling="2025-10-03 14:15:18.075875487 +0000 UTC m=+5114.285724573" observedRunningTime="2025-10-03 14:15:19.690446168 +0000 UTC m=+5115.900295234" watchObservedRunningTime="2025-10-03 14:15:19.691851955 +0000 UTC m=+5115.901701021" Oct 03 14:15:19 crc kubenswrapper[4868]: I1003 14:15:19.945395 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:19 crc kubenswrapper[4868]: W1003 14:15:19.959029 4868 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7dd10700_3aba_46c9_b57a_2a239c3cac5a.slice/crio-3659c9da63504f5923868ea404c7b6e51982519c302f5560290f98ac21dc67ac WatchSource:0}: Error finding container 3659c9da63504f5923868ea404c7b6e51982519c302f5560290f98ac21dc67ac: Status 404 returned error can't find the container with id 3659c9da63504f5923868ea404c7b6e51982519c302f5560290f98ac21dc67ac Oct 03 14:15:20 crc kubenswrapper[4868]: I1003 14:15:20.640572 4868 generic.go:334] "Generic (PLEG): container finished" podID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerID="40f44c80477d096368c2734e37aa11fd03b16e93f6d2bdcb9a7e9b228a0b4bf5" exitCode=0 Oct 03 14:15:20 crc kubenswrapper[4868]: I1003 14:15:20.640697 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerDied","Data":"40f44c80477d096368c2734e37aa11fd03b16e93f6d2bdcb9a7e9b228a0b4bf5"} Oct 03 14:15:20 crc kubenswrapper[4868]: I1003 14:15:20.641035 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerStarted","Data":"3659c9da63504f5923868ea404c7b6e51982519c302f5560290f98ac21dc67ac"} Oct 03 14:15:21 crc kubenswrapper[4868]: I1003 14:15:21.652448 4868 generic.go:334] "Generic (PLEG): container finished" podID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerID="62f04348ae565b646cd85a4386faf1d5e28ce19a1b4ded52772692ec15e68676" exitCode=0 Oct 03 14:15:21 crc kubenswrapper[4868]: I1003 14:15:21.652624 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerDied","Data":"62f04348ae565b646cd85a4386faf1d5e28ce19a1b4ded52772692ec15e68676"} Oct 03 14:15:21 crc kubenswrapper[4868]: I1003 14:15:21.659928 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerStarted","Data":"f4be35595b1744646a0e87c7bbce42f0d2228c1a682ccff074736406805a7011"} Oct 03 14:15:21 crc kubenswrapper[4868]: I1003 14:15:21.696942 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-rg79p" podStartSLOduration=3.535974452 podStartE2EDuration="8.696917651s" podCreationTimestamp="2025-10-03 14:15:13 +0000 UTC" firstStartedPulling="2025-10-03 14:15:15.549045553 +0000 UTC m=+5111.758894619" lastFinishedPulling="2025-10-03 14:15:20.709988752 +0000 UTC m=+5116.919837818" observedRunningTime="2025-10-03 14:15:21.689556926 +0000 UTC m=+5117.899405992" watchObservedRunningTime="2025-10-03 14:15:21.696917651 +0000 UTC m=+5117.906766717" Oct 03 14:15:23 crc kubenswrapper[4868]: I1003 14:15:23.686436 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerStarted","Data":"ed714c9a2ee074c30ccf77327844bc8ee3a1037e6e7e63d37817ea48638671c5"} Oct 03 14:15:23 crc kubenswrapper[4868]: I1003 14:15:23.711044 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rjgnn" podStartSLOduration=3.844272571 podStartE2EDuration="5.711020256s" podCreationTimestamp="2025-10-03 14:15:18 +0000 UTC" firstStartedPulling="2025-10-03 14:15:20.642191657 
+0000 UTC m=+5116.852040723" lastFinishedPulling="2025-10-03 14:15:22.508939332 +0000 UTC m=+5118.718788408" observedRunningTime="2025-10-03 14:15:23.704171515 +0000 UTC m=+5119.914020601" watchObservedRunningTime="2025-10-03 14:15:23.711020256 +0000 UTC m=+5119.920869332"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.342994 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-rg79p"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.343155 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-rg79p"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.395468 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-rg79p"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.609298 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mvmvh"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.609444 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mvmvh"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.657838 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mvmvh"
Oct 03 14:15:24 crc kubenswrapper[4868]: I1003 14:15:24.735212 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mvmvh"
Oct 03 14:15:27 crc kubenswrapper[4868]: I1003 14:15:27.194231 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"]
Oct 03 14:15:27 crc kubenswrapper[4868]: I1003 14:15:27.196033 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mvmvh" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="registry-server" containerID="cri-o://596436e743d58d1df24d083e398497eeb5f701283737d8cb009b01832a14a16c" gracePeriod=2
Oct 03 14:15:27 crc kubenswrapper[4868]: I1003 14:15:27.730340 4868 generic.go:334] "Generic (PLEG): container finished" podID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerID="596436e743d58d1df24d083e398497eeb5f701283737d8cb009b01832a14a16c" exitCode=0
Oct 03 14:15:27 crc kubenswrapper[4868]: I1003 14:15:27.730391 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerDied","Data":"596436e743d58d1df24d083e398497eeb5f701283737d8cb009b01832a14a16c"}
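The pod_startup_latency_tracker entries above carry a small piece of arithmetic worth making explicit: taking the logged fields at face value, podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp, and podStartSLOduration is that same span minus the image-pull window (lastFinishedPulling minus firstStartedPulling). The sketch below is an illustration built from the logged values, not kubelet source; it reproduces the numbers from the redhat-marketplace-rjgnn entry to within floating-point rounding.

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			panic(err)
		}
		return t
	}

	// Field values copied from the redhat-marketplace-rjgnn entry above.
	created := parse("2025-10-03T14:15:18Z")             // podCreationTimestamp (whole-second precision)
	firstPull := parse("2025-10-03T14:15:20.642191657Z") // firstStartedPulling
	lastPull := parse("2025-10-03T14:15:22.508939332Z")  // lastFinishedPulling
	running := parse("2025-10-03T14:15:23.711020256Z")   // watchObservedRunningTime

	e2e := running.Sub(created)     // 5.711020256s, the logged podStartE2EDuration
	pull := lastPull.Sub(firstPull) // 1.866747675s spent pulling the image
	slo := e2e - pull               // ~3.844272581s, the logged podStartSLOduration

	fmt.Println("e2e:", e2e, "pull:", pull, "slo:", slo)
}
```

The same relation holds exactly for the community-operators-rg79p entry (8.696917651s minus a 5.160943199s pull gives the logged 3.535974452) and, to within rounding, for certified-operators-mvmvh, so the SLO figure is best read as startup time excluding image pulls.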
Need to start a new one" pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.223513 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities\") pod \"18a9dbcc-01a0-40bd-be89-b3333449d24c\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.223669 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content\") pod \"18a9dbcc-01a0-40bd-be89-b3333449d24c\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.223734 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6chz\" (UniqueName: \"kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz\") pod \"18a9dbcc-01a0-40bd-be89-b3333449d24c\" (UID: \"18a9dbcc-01a0-40bd-be89-b3333449d24c\") " Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.226209 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities" (OuterVolumeSpecName: "utilities") pod "18a9dbcc-01a0-40bd-be89-b3333449d24c" (UID: "18a9dbcc-01a0-40bd-be89-b3333449d24c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.325678 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.751559 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mvmvh" event={"ID":"18a9dbcc-01a0-40bd-be89-b3333449d24c","Type":"ContainerDied","Data":"2ef447d15f427816261c805711e49cdada9bfb0d1e5b1901b54938ef805aa98c"} Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.751647 4868 scope.go:117] "RemoveContainer" containerID="596436e743d58d1df24d083e398497eeb5f701283737d8cb009b01832a14a16c" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.751650 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mvmvh" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.780956 4868 scope.go:117] "RemoveContainer" containerID="4cf11231a85c0dc5e703aa0aab5d0bc47e55f94f5171c8fecb78dc116ff9a889" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.862626 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz" (OuterVolumeSpecName: "kube-api-access-r6chz") pod "18a9dbcc-01a0-40bd-be89-b3333449d24c" (UID: "18a9dbcc-01a0-40bd-be89-b3333449d24c"). InnerVolumeSpecName "kube-api-access-r6chz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.881247 4868 scope.go:117] "RemoveContainer" containerID="191bb618f518f25de657aae55f57b91b79907050a1cefc3db3b48e51b9a204c9" Oct 03 14:15:28 crc kubenswrapper[4868]: I1003 14:15:28.938448 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6chz\" (UniqueName: \"kubernetes.io/projected/18a9dbcc-01a0-40bd-be89-b3333449d24c-kube-api-access-r6chz\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.159842 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.160026 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.210489 4868 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.570351 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "18a9dbcc-01a0-40bd-be89-b3333449d24c" (UID: "18a9dbcc-01a0-40bd-be89-b3333449d24c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.654697 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/18a9dbcc-01a0-40bd-be89-b3333449d24c-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.689222 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"] Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.696600 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mvmvh"] Oct 03 14:15:29 crc kubenswrapper[4868]: I1003 14:15:29.809242 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:30 crc kubenswrapper[4868]: I1003 14:15:30.544586 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:15:30 crc kubenswrapper[4868]: E1003 14:15:30.545372 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:15:30 crc kubenswrapper[4868]: I1003 14:15:30.560367 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" path="/var/lib/kubelet/pods/18a9dbcc-01a0-40bd-be89-b3333449d24c/volumes" Oct 03 14:15:31 crc kubenswrapper[4868]: I1003 14:15:31.593394 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.417558 4868 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-must-gather-wlxzl/must-gather-x5t7b"] Oct 03 14:15:32 crc kubenswrapper[4868]: E1003 14:15:32.418324 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="registry-server" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.418345 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="registry-server" Oct 03 14:15:32 crc kubenswrapper[4868]: E1003 14:15:32.418372 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="extract-content" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.418379 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="extract-content" Oct 03 14:15:32 crc kubenswrapper[4868]: E1003 14:15:32.418394 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="extract-utilities" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.418401 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="extract-utilities" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.418614 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="18a9dbcc-01a0-40bd-be89-b3333449d24c" containerName="registry-server" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.419598 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.421427 4868 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-wlxzl"/"default-dockercfg-mstd8" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.421482 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wlxzl"/"openshift-service-ca.crt" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.423203 4868 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-wlxzl"/"kube-root-ca.crt" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.426843 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wlxzl/must-gather-x5t7b"] Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.512682 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.512745 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tns2k\" (UniqueName: \"kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.614287 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " 
pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.614328 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tns2k\" (UniqueName: \"kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.614767 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.635124 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tns2k\" (UniqueName: \"kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k\") pod \"must-gather-x5t7b\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.743304 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:15:32 crc kubenswrapper[4868]: I1003 14:15:32.800434 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rjgnn" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="registry-server" containerID="cri-o://ed714c9a2ee074c30ccf77327844bc8ee3a1037e6e7e63d37817ea48638671c5" gracePeriod=2 Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.379895 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-wlxzl/must-gather-x5t7b"] Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.814790 4868 generic.go:334] "Generic (PLEG): container finished" podID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerID="ed714c9a2ee074c30ccf77327844bc8ee3a1037e6e7e63d37817ea48638671c5" exitCode=0 Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.814853 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerDied","Data":"ed714c9a2ee074c30ccf77327844bc8ee3a1037e6e7e63d37817ea48638671c5"} Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.816367 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" event={"ID":"25b4e0ac-2921-4866-be4f-d2a53a0f7e43","Type":"ContainerStarted","Data":"375f25879281061680eec1df89b65b614769521f52d4a4602999593b2bc18bc8"} Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.891356 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.943422 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h25hf\" (UniqueName: \"kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf\") pod \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.943540 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities\") pod \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.943891 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content\") pod \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\" (UID: \"7dd10700-3aba-46c9-b57a-2a239c3cac5a\") " Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.945525 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities" (OuterVolumeSpecName: "utilities") pod "7dd10700-3aba-46c9-b57a-2a239c3cac5a" (UID: "7dd10700-3aba-46c9-b57a-2a239c3cac5a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.949531 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf" (OuterVolumeSpecName: "kube-api-access-h25hf") pod "7dd10700-3aba-46c9-b57a-2a239c3cac5a" (UID: "7dd10700-3aba-46c9-b57a-2a239c3cac5a"). InnerVolumeSpecName "kube-api-access-h25hf". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:33 crc kubenswrapper[4868]: I1003 14:15:33.959180 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7dd10700-3aba-46c9-b57a-2a239c3cac5a" (UID: "7dd10700-3aba-46c9-b57a-2a239c3cac5a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.046636 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h25hf\" (UniqueName: \"kubernetes.io/projected/7dd10700-3aba-46c9-b57a-2a239c3cac5a-kube-api-access-h25hf\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.046666 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.046679 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7dd10700-3aba-46c9-b57a-2a239c3cac5a-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.404822 4868 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.827222 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rjgnn" event={"ID":"7dd10700-3aba-46c9-b57a-2a239c3cac5a","Type":"ContainerDied","Data":"3659c9da63504f5923868ea404c7b6e51982519c302f5560290f98ac21dc67ac"} Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.827535 4868 scope.go:117] "RemoveContainer" containerID="ed714c9a2ee074c30ccf77327844bc8ee3a1037e6e7e63d37817ea48638671c5" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.827347 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rjgnn" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.848804 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.855502 4868 scope.go:117] "RemoveContainer" containerID="62f04348ae565b646cd85a4386faf1d5e28ce19a1b4ded52772692ec15e68676" Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.857604 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rjgnn"] Oct 03 14:15:34 crc kubenswrapper[4868]: I1003 14:15:34.880300 4868 scope.go:117] "RemoveContainer" containerID="40f44c80477d096368c2734e37aa11fd03b16e93f6d2bdcb9a7e9b228a0b4bf5" Oct 03 14:15:36 crc kubenswrapper[4868]: I1003 14:15:36.564904 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" path="/var/lib/kubelet/pods/7dd10700-3aba-46c9-b57a-2a239c3cac5a/volumes" Oct 03 14:15:36 crc kubenswrapper[4868]: I1003 14:15:36.798021 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:36 crc kubenswrapper[4868]: I1003 14:15:36.805446 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-rg79p" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="registry-server" containerID="cri-o://f4be35595b1744646a0e87c7bbce42f0d2228c1a682ccff074736406805a7011" gracePeriod=2 Oct 03 14:15:37 crc kubenswrapper[4868]: I1003 14:15:37.865885 4868 generic.go:334] "Generic (PLEG): container finished" podID="d8006204-7b08-4490-97ee-2d2077eaca96" containerID="f4be35595b1744646a0e87c7bbce42f0d2228c1a682ccff074736406805a7011" exitCode=0 Oct 03 14:15:37 crc kubenswrapper[4868]: I1003 
14:15:37.866195 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerDied","Data":"f4be35595b1744646a0e87c7bbce42f0d2228c1a682ccff074736406805a7011"} Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.344609 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.464285 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content\") pod \"d8006204-7b08-4490-97ee-2d2077eaca96\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.464364 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nw894\" (UniqueName: \"kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894\") pod \"d8006204-7b08-4490-97ee-2d2077eaca96\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.464427 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities\") pod \"d8006204-7b08-4490-97ee-2d2077eaca96\" (UID: \"d8006204-7b08-4490-97ee-2d2077eaca96\") " Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.466718 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities" (OuterVolumeSpecName: "utilities") pod "d8006204-7b08-4490-97ee-2d2077eaca96" (UID: "d8006204-7b08-4490-97ee-2d2077eaca96"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.527118 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8006204-7b08-4490-97ee-2d2077eaca96" (UID: "d8006204-7b08-4490-97ee-2d2077eaca96"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.557266 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894" (OuterVolumeSpecName: "kube-api-access-nw894") pod "d8006204-7b08-4490-97ee-2d2077eaca96" (UID: "d8006204-7b08-4490-97ee-2d2077eaca96"). InnerVolumeSpecName "kube-api-access-nw894". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.567172 4868 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-catalog-content\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.567196 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nw894\" (UniqueName: \"kubernetes.io/projected/d8006204-7b08-4490-97ee-2d2077eaca96-kube-api-access-nw894\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.567207 4868 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8006204-7b08-4490-97ee-2d2077eaca96-utilities\") on node \"crc\" DevicePath \"\"" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.886812 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" event={"ID":"25b4e0ac-2921-4866-be4f-d2a53a0f7e43","Type":"ContainerStarted","Data":"ae54427f10096078181ae494f99670c8a1abeaa2fb10a9138c42f005b8c638e4"} Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.886871 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" event={"ID":"25b4e0ac-2921-4866-be4f-d2a53a0f7e43","Type":"ContainerStarted","Data":"337a092bcd958ae720c1060960f3a364e95411fe60fc35b2bc0328b623ef661f"} Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.889472 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-rg79p" event={"ID":"d8006204-7b08-4490-97ee-2d2077eaca96","Type":"ContainerDied","Data":"fd895e2af94aac5146e91fe9559aadabc6e580446240c923fe64a72b882206a6"} Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.889535 4868 scope.go:117] "RemoveContainer" containerID="f4be35595b1744646a0e87c7bbce42f0d2228c1a682ccff074736406805a7011" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.889726 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-rg79p" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.912322 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" podStartSLOduration=2.280046805 podStartE2EDuration="7.912301687s" podCreationTimestamp="2025-10-03 14:15:32 +0000 UTC" firstStartedPulling="2025-10-03 14:15:33.388269293 +0000 UTC m=+5129.598118359" lastFinishedPulling="2025-10-03 14:15:39.020524175 +0000 UTC m=+5135.230373241" observedRunningTime="2025-10-03 14:15:39.906812541 +0000 UTC m=+5136.116661607" watchObservedRunningTime="2025-10-03 14:15:39.912301687 +0000 UTC m=+5136.122150753" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.932571 4868 scope.go:117] "RemoveContainer" containerID="a573a12e11ceb40b80fa5fecae05f2b547a0c196da7686ede34820b44c34731a" Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.935218 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.947015 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-rg79p"] Oct 03 14:15:39 crc kubenswrapper[4868]: I1003 14:15:39.973676 4868 scope.go:117] "RemoveContainer" containerID="ad59e2563dfcc6d98ee92fbe675fe2dd39ff5ebb67fad5304d48879d59c5c209" Oct 03 14:15:40 crc kubenswrapper[4868]: I1003 14:15:40.555370 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" path="/var/lib/kubelet/pods/d8006204-7b08-4490-97ee-2d2077eaca96/volumes" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.550027 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.550876 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555494 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-klrcr"] Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555814 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="extract-content" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555833 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="extract-content" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555856 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555865 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555884 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="extract-content" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 
14:15:44.555891 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="extract-content" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555910 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="extract-utilities" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555916 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="extract-utilities" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555933 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="extract-utilities" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555940 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="extract-utilities" Oct 03 14:15:44 crc kubenswrapper[4868]: E1003 14:15:44.555955 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.555961 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.556349 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8006204-7b08-4490-97ee-2d2077eaca96" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.556370 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="7dd10700-3aba-46c9-b57a-2a239c3cac5a" containerName="registry-server" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.557029 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.669907 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcxtq\" (UniqueName: \"kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.670477 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.772399 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.772483 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcxtq\" (UniqueName: \"kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.772860 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.794695 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcxtq\" (UniqueName: \"kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq\") pod \"crc-debug-klrcr\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") " pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.879699 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" Oct 03 14:15:44 crc kubenswrapper[4868]: I1003 14:15:44.968560 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" event={"ID":"ebf1b280-d90d-4d7c-9c60-980c79f1b23d","Type":"ContainerStarted","Data":"7f47666017b60a70a355e752ba68ac0cd984071af4d16ff765920b620adaa116"} Oct 03 14:15:58 crc kubenswrapper[4868]: I1003 14:15:58.094909 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" event={"ID":"ebf1b280-d90d-4d7c-9c60-980c79f1b23d","Type":"ContainerStarted","Data":"a530e62c46d9de16036d1e77ec0292f838916fb935b43e9d21493c77c211d871"} Oct 03 14:15:58 crc kubenswrapper[4868]: I1003 14:15:58.110361 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" podStartSLOduration=1.986701491 podStartE2EDuration="14.110343513s" podCreationTimestamp="2025-10-03 14:15:44 +0000 UTC" firstStartedPulling="2025-10-03 14:15:44.917575636 +0000 UTC m=+5141.127424702" lastFinishedPulling="2025-10-03 14:15:57.041217658 +0000 UTC m=+5153.251066724" observedRunningTime="2025-10-03 14:15:58.107390975 +0000 UTC m=+5154.317240041" watchObservedRunningTime="2025-10-03 14:15:58.110343513 +0000 UTC m=+5154.320192579" Oct 03 14:15:59 crc kubenswrapper[4868]: I1003 14:15:59.544124 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:15:59 crc kubenswrapper[4868]: E1003 14:15:59.545501 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:16:13 crc kubenswrapper[4868]: I1003 14:16:13.544836 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:16:13 crc kubenswrapper[4868]: E1003 14:16:13.547997 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:16:27 crc kubenswrapper[4868]: I1003 14:16:27.544551 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:16:27 crc kubenswrapper[4868]: E1003 14:16:27.545579 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:16:39 crc kubenswrapper[4868]: I1003 14:16:39.544685 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 
Oct 03 14:16:39 crc kubenswrapper[4868]: E1003 14:16:39.545485 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.016398 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cbcbd76c4-z459l_15bc6da3-2507-4029-80fb-fa480d30f199/barbican-api/0.log"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.109494 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-cbcbd76c4-z459l_15bc6da3-2507-4029-80fb-fa480d30f199/barbican-api-log/0.log"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.354708 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7dc9986bd6-bfv66_41df168a-4bf1-44a4-ba5a-2f398a82c8fb/barbican-keystone-listener/0.log"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.595430 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f8d674479-d2hdw_9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce/barbican-worker/0.log"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.706118 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-7f8d674479-d2hdw_9f7b38f9-0ea7-4dc0-9f08-f581473dd3ce/barbican-worker-log/0.log"
Oct 03 14:16:47 crc kubenswrapper[4868]: I1003 14:16:47.714728 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7dc9986bd6-bfv66_41df168a-4bf1-44a4-ba5a-2f398a82c8fb/barbican-keystone-listener-log/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.001896 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-42hkc_788a1c65-a9bf-419d-aca4-464a22ece644/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.010969 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b55a39cc-8088-4a92-9976-b45a5e69ffd6/ceilometer-central-agent/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.194862 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b55a39cc-8088-4a92-9976-b45a5e69ffd6/ceilometer-notification-agent/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.257584 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b55a39cc-8088-4a92-9976-b45a5e69ffd6/sg-core/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.280800 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_b55a39cc-8088-4a92-9976-b45a5e69ffd6/proxy-httpd/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.474698 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_090414a6-8af1-4239-9263-ee2c57b89414/cinder-api-log/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.550335 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_090414a6-8af1-4239-9263-ee2c57b89414/cinder-api/0.log"
Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.798232 4868 log.go:25] "Finished parsing log
file" path="/var/log/pods/openstack_cinder-scheduler-0_c595ad51-042d-4867-9db2-68166545d242/probe/0.log" Oct 03 14:16:48 crc kubenswrapper[4868]: I1003 14:16:48.808859 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_c595ad51-042d-4867-9db2-68166545d242/cinder-scheduler/0.log" Oct 03 14:16:49 crc kubenswrapper[4868]: I1003 14:16:49.248680 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-98nqb_c36e73f9-31f4-474b-9f2d-e88328a747fc/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:49 crc kubenswrapper[4868]: I1003 14:16:49.528779 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-65tvx_8b59ed35-42bc-4fad-ad96-28152d3234cd/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:49 crc kubenswrapper[4868]: I1003 14:16:49.768133 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-dfl6l_afe26f8a-45f7-4f06-b6ef-3584fe8b51a7/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:49 crc kubenswrapper[4868]: I1003 14:16:49.798900 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-8hpcf_cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48/init/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.052539 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-8hpcf_cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48/init/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.181588 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-55478c4467-8hpcf_cdff8fa4-8a6c-4a18-bdee-6ec43e6d3d48/dnsmasq-dns/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.503460 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_847afc40-b6d8-4b55-9101-11d808ae4961/glance-log/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.584947 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_847afc40-b6d8-4b55-9101-11d808ae4961/glance-httpd/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.639362 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-bph2x_5ed03626-ece8-4aac-830d-ceef1eb2e5b8/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.729147 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ed50e7e6-260d-4ac7-99cf-cf935ec77577/glance-httpd/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.781641 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_ed50e7e6-260d-4ac7-99cf-cf935ec77577/glance-log/0.log" Oct 03 14:16:50 crc kubenswrapper[4868]: I1003 14:16:50.955658 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6545c458bd-ttzj2_8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f/horizon/0.log" Oct 03 14:16:51 crc kubenswrapper[4868]: I1003 14:16:51.245955 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-p8hpl_831c7a15-2c7a-4e7c-9908-65261a142070/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:51 crc 
kubenswrapper[4868]: I1003 14:16:51.404590 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-bdgll_3876c0c5-a630-4bf0-9072-3bcf5889430c/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:51 crc kubenswrapper[4868]: I1003 14:16:51.450064 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-6545c458bd-ttzj2_8fbf3ab7-41a0-44dc-9fc1-e74ec76d395f/horizon-log/0.log" Oct 03 14:16:51 crc kubenswrapper[4868]: I1003 14:16:51.651990 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29325001-w7zlw_246879f2-e7ef-4506-ac99-7a5bf0bcbe62/keystone-cron/0.log" Oct 03 14:16:51 crc kubenswrapper[4868]: I1003 14:16:51.672342 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-64cb6d74d6-hlvzr_d4f6f758-2853-4aa4-b040-f1c9501105a6/keystone-api/0.log" Oct 03 14:16:51 crc kubenswrapper[4868]: I1003 14:16:51.824457 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_a2f50a43-ea0f-4242-b3a1-4fbf1f76fca8/kube-state-metrics/0.log" Oct 03 14:16:52 crc kubenswrapper[4868]: I1003 14:16:52.090740 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cfd6858f-9jn5h_2036b8f4-7d6a-46eb-9eb8-3d9827c878be/neutron-api/0.log" Oct 03 14:16:52 crc kubenswrapper[4868]: I1003 14:16:52.150723 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-cfd6858f-9jn5h_2036b8f4-7d6a-46eb-9eb8-3d9827c878be/neutron-httpd/0.log" Oct 03 14:16:52 crc kubenswrapper[4868]: I1003 14:16:52.838399 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f1d25601-e520-4e7b-99e2-d35e6568d838/nova-api-log/0.log" Oct 03 14:16:53 crc kubenswrapper[4868]: I1003 14:16:53.202345 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_f1d25601-e520-4e7b-99e2-d35e6568d838/nova-api-api/0.log" Oct 03 14:16:53 crc kubenswrapper[4868]: I1003 14:16:53.234035 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_9fe73541-1e4f-4caf-9e26-7865eb2908f9/nova-cell0-conductor-conductor/0.log" Oct 03 14:16:53 crc kubenswrapper[4868]: I1003 14:16:53.577447 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_90aa0fe0-12ad-4477-b6f3-7ce50f480fd9/nova-cell1-conductor-conductor/0.log" Oct 03 14:16:53 crc kubenswrapper[4868]: I1003 14:16:53.617735 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_636e5929-0de9-4493-89e1-7844be486324/nova-cell1-novncproxy-novncproxy/0.log" Oct 03 14:16:53 crc kubenswrapper[4868]: I1003 14:16:53.865899 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b3e36433-b5ce-4428-ad2e-1bf31738b016/nova-metadata-log/0.log" Oct 03 14:16:54 crc kubenswrapper[4868]: I1003 14:16:54.426497 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_48e1d58c-c725-48c5-99f2-b3ce568d1136/nova-scheduler-scheduler/0.log" Oct 03 14:16:54 crc kubenswrapper[4868]: I1003 14:16:54.551329 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:16:54 crc kubenswrapper[4868]: E1003 14:16:54.551637 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:16:54 crc kubenswrapper[4868]: I1003 14:16:54.842308 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b28a570d-3c2f-43c5-8be6-908d8ecabb08/mysql-bootstrap/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.050746 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b28a570d-3c2f-43c5-8be6-908d8ecabb08/mysql-bootstrap/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.165935 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_b28a570d-3c2f-43c5-8be6-908d8ecabb08/galera/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.404108 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7a386b95-6440-43fb-88c4-9e48c2277ca5/mysql-bootstrap/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.589418 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7a386b95-6440-43fb-88c4-9e48c2277ca5/mysql-bootstrap/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.621186 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_7a386b95-6440-43fb-88c4-9e48c2277ca5/galera/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.838123 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_b3e36433-b5ce-4428-ad2e-1bf31738b016/nova-metadata-metadata/0.log" Oct 03 14:16:55 crc kubenswrapper[4868]: I1003 14:16:55.857550 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_cc1e34f6-e4f8-4287-a809-8f25736927d0/openstackclient/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.141777 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-x4xch_939a9e63-6947-478c-8a13-75d46852cf89/openstack-network-exporter/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.386026 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5lxjj_c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24/ovsdb-server-init/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.622378 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5lxjj_c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24/ovsdb-server/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.627640 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5lxjj_c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24/ovsdb-server-init/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.639592 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-5lxjj_c7e7a9d0-47c6-47b7-b0d1-1fd9b3fb9e24/ovs-vswitchd/0.log" Oct 03 14:16:56 crc kubenswrapper[4868]: I1003 14:16:56.861748 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-w4z7q_75727504-0a62-4459-add3-419d244f05ff/ovn-controller/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.186087 4868 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-26cn4_466cdf6d-842e-4f9a-a1de-7f7471ec3c14/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.202845 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-5tcvz_eda000c8-8118-48c9-ac7b-42353619ea8d/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.428320 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-989zh_5f27ba9e-0599-4586-a237-6df89c605a4b/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.579149 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-h8tkn_e95bbe25-a27c-466d-8d6b-bf2d745a6429/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.693852 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-jvv5j_21a6937f-c0e2-4f48-b641-c010d75d5f52/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:57 crc kubenswrapper[4868]: I1003 14:16:57.890269 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-ng6mc_3564d986-0715-4a9a-acf7-caebd6007fcf/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.128641 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-r6j6x_b9f77be8-e1a6-4f09-9484-28f3a7fb60c9/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.190460 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_26ca4d65-6de7-490b-9492-c9fd70fe37b5/openstack-network-exporter/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.397009 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_26ca4d65-6de7-490b-9492-c9fd70fe37b5/ovn-northd/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.404338 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_27389bc5-0ed5-44b7-8061-fe3a9567ad3e/openstack-network-exporter/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.614088 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_27389bc5-0ed5-44b7-8061-fe3a9567ad3e/ovsdbserver-nb/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.674450 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1cc3db3f-e498-4f45-86bb-25781ae2f282/openstack-network-exporter/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.827993 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1cc3db3f-e498-4f45-86bb-25781ae2f282/ovsdbserver-sb/0.log" Oct 03 14:16:58 crc kubenswrapper[4868]: I1003 14:16:58.961874 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5786fc7ff8-jg85w_cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f/placement-api/0.log" Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.101236 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-5786fc7ff8-jg85w_cd4e818d-a3aa-4fc7-a9a5-aaa25c0e687f/placement-log/0.log" Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 
14:16:59.170308 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd/setup-container/0.log"
Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.482823 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd/rabbitmq/0.log"
Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.508900 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_0c74a43f-0f0e-4ec0-bec9-5abedaf5f1cd/setup-container/0.log"
Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.707226 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_115a46e7-8030-4ef7-9567-252f2a2a1467/setup-container/0.log"
Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.958140 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_115a46e7-8030-4ef7-9567-252f2a2a1467/setup-container/0.log"
Oct 03 14:16:59 crc kubenswrapper[4868]: I1003 14:16:59.996344 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_115a46e7-8030-4ef7-9567-252f2a2a1467/rabbitmq/0.log"
Oct 03 14:17:00 crc kubenswrapper[4868]: I1003 14:17:00.194930 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-vf4c9_5ee021a5-0c14-460b-afdb-5b73f394355d/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:17:00 crc kubenswrapper[4868]: I1003 14:17:00.282990 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-scfls_9bb16b8f-fb6f-475e-b4cd-fdea4804d5e0/redhat-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:17:00 crc kubenswrapper[4868]: I1003 14:17:00.543558 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-mctff_e8ebaf9b-70a4-44de-8873-d9ff816819a1/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:17:00 crc kubenswrapper[4868]: I1003 14:17:00.960824 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-tgkrx_ddfbf7b2-102d-4874-8b6e-d322d5eaabc4/ssh-known-hosts-edpm-deployment/0.log"
Oct 03 14:17:00 crc kubenswrapper[4868]: I1003 14:17:00.963930 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-9hdjh_ea96ad6b-3737-4dab-849e-b633d1ecc135/run-os-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.219506 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-75c4dd668c-xqjsj_18a5e2cd-7517-4ef9-ab47-f4236b4bb836/proxy-server/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.347888 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-75c4dd668c-xqjsj_18a5e2cd-7517-4ef9-ab47-f4236b4bb836/proxy-httpd/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.430378 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-p5w5j_49f095a3-2d77-498e-bacc-3e6c711f4700/swift-ring-rebalance/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.567712 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/account-auditor/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.648310 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/account-reaper/0.log"
Oct 03 14:17:01 crc kubenswrapper[4868]: I1003 14:17:01.755402 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/account-replicator/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.239221 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/container-auditor/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.318005 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/account-server/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.366176 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/container-replicator/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.519120 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/container-updater/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.524966 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/container-server/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.642735 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/object-auditor/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.737240 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/object-replicator/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.776748 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/object-expirer/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.886573 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/object-server/0.log"
Oct 03 14:17:02 crc kubenswrapper[4868]: I1003 14:17:02.969906 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/object-updater/0.log"
Oct 03 14:17:03 crc kubenswrapper[4868]: I1003 14:17:03.018809 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/rsync/0.log"
Oct 03 14:17:03 crc kubenswrapper[4868]: I1003 14:17:03.140872 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_bb0842a9-9947-4561-af16-154496b90622/swift-recon-cron/0.log"
Oct 03 14:17:03 crc kubenswrapper[4868]: I1003 14:17:03.230607 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-w5mzq_b3cfb364-3b86-4c7c-bb56-cf85bfd9cde6/validate-network-edpm-deployment-openstack-edpm-ipam/0.log"
Oct 03 14:17:06 crc kubenswrapper[4868]: I1003 14:17:06.389863 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_e07cfc6a-c6f0-448f-a710-ead7d29c4619/memcached/0.log"
Oct 03 14:17:06 crc kubenswrapper[4868]: I1003 14:17:06.546329 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:17:06 crc kubenswrapper[4868]: E1003 14:17:06.546638 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:17:18 crc kubenswrapper[4868]: I1003 14:17:18.544160 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:17:18 crc kubenswrapper[4868]: E1003 14:17:18.544893 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:17:32 crc kubenswrapper[4868]: I1003 14:17:32.544038 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:17:32 crc kubenswrapper[4868]: E1003 14:17:32.544923 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:17:44 crc kubenswrapper[4868]: I1003 14:17:44.549636 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:17:44 crc kubenswrapper[4868]: E1003 14:17:44.550552 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:17:46 crc kubenswrapper[4868]: I1003 14:17:46.166103 4868 generic.go:334] "Generic (PLEG): container finished" podID="ebf1b280-d90d-4d7c-9c60-980c79f1b23d" containerID="a530e62c46d9de16036d1e77ec0292f838916fb935b43e9d21493c77c211d871" exitCode=0
Oct 03 14:17:46 crc kubenswrapper[4868]: I1003 14:17:46.166205 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-klrcr" event={"ID":"ebf1b280-d90d-4d7c-9c60-980c79f1b23d","Type":"ContainerDied","Data":"a530e62c46d9de16036d1e77ec0292f838916fb935b43e9d21493c77c211d871"}
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.282135 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-klrcr"
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.314361 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-klrcr"]
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.314976 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zcxtq\" (UniqueName: \"kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq\") pod \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") "
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.315267 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host\") pod \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\" (UID: \"ebf1b280-d90d-4d7c-9c60-980c79f1b23d\") "
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.315354 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host" (OuterVolumeSpecName: "host") pod "ebf1b280-d90d-4d7c-9c60-980c79f1b23d" (UID: "ebf1b280-d90d-4d7c-9c60-980c79f1b23d"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.315853 4868 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.320948 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq" (OuterVolumeSpecName: "kube-api-access-zcxtq") pod "ebf1b280-d90d-4d7c-9c60-980c79f1b23d" (UID: "ebf1b280-d90d-4d7c-9c60-980c79f1b23d"). InnerVolumeSpecName "kube-api-access-zcxtq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.325843 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-klrcr"]
Oct 03 14:17:47 crc kubenswrapper[4868]: I1003 14:17:47.416823 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zcxtq\" (UniqueName: \"kubernetes.io/projected/ebf1b280-d90d-4d7c-9c60-980c79f1b23d-kube-api-access-zcxtq\") on node \"crc\" DevicePath \"\""
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.191114 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7f47666017b60a70a355e752ba68ac0cd984071af4d16ff765920b620adaa116"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.191153 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-klrcr"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.514691 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-5tv6r"]
Oct 03 14:17:48 crc kubenswrapper[4868]: E1003 14:17:48.515097 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ebf1b280-d90d-4d7c-9c60-980c79f1b23d" containerName="container-00"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.515109 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="ebf1b280-d90d-4d7c-9c60-980c79f1b23d" containerName="container-00"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.515299 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="ebf1b280-d90d-4d7c-9c60-980c79f1b23d" containerName="container-00"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.515894 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.541459 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.541553 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xljz4\" (UniqueName: \"kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.557072 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ebf1b280-d90d-4d7c-9c60-980c79f1b23d" path="/var/lib/kubelet/pods/ebf1b280-d90d-4d7c-9c60-980c79f1b23d/volumes"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.643467 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.643566 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xljz4\" (UniqueName: \"kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.644900 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.663775 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xljz4\" (UniqueName: \"kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4\") pod \"crc-debug-5tv6r\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") " pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:48 crc kubenswrapper[4868]: I1003 14:17:48.834494 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:49 crc kubenswrapper[4868]: I1003 14:17:49.200602 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r" event={"ID":"bb408430-63e9-4e6b-b19c-e90d594925a2","Type":"ContainerStarted","Data":"72dbb202c7b88b541e1a702d2aa647ca53eed0ba73389683924ee4399f447274"}
Oct 03 14:17:49 crc kubenswrapper[4868]: I1003 14:17:49.200954 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r" event={"ID":"bb408430-63e9-4e6b-b19c-e90d594925a2","Type":"ContainerStarted","Data":"e1fe9a71460fceaf4cfe633bab04522b9e4d164055285305cd9f13b0ce2a9276"}
Oct 03 14:17:49 crc kubenswrapper[4868]: I1003 14:17:49.218394 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r" podStartSLOduration=1.218371863 podStartE2EDuration="1.218371863s" podCreationTimestamp="2025-10-03 14:17:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-10-03 14:17:49.214222833 +0000 UTC m=+5265.424071899" watchObservedRunningTime="2025-10-03 14:17:49.218371863 +0000 UTC m=+5265.428220929"
Oct 03 14:17:51 crc kubenswrapper[4868]: I1003 14:17:51.221882 4868 generic.go:334] "Generic (PLEG): container finished" podID="bb408430-63e9-4e6b-b19c-e90d594925a2" containerID="72dbb202c7b88b541e1a702d2aa647ca53eed0ba73389683924ee4399f447274" exitCode=0
Oct 03 14:17:51 crc kubenswrapper[4868]: I1003 14:17:51.222021 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r" event={"ID":"bb408430-63e9-4e6b-b19c-e90d594925a2","Type":"ContainerDied","Data":"72dbb202c7b88b541e1a702d2aa647ca53eed0ba73389683924ee4399f447274"}
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.324860 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.403628 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host\") pod \"bb408430-63e9-4e6b-b19c-e90d594925a2\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") "
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.403759 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host" (OuterVolumeSpecName: "host") pod "bb408430-63e9-4e6b-b19c-e90d594925a2" (UID: "bb408430-63e9-4e6b-b19c-e90d594925a2"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.403801 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xljz4\" (UniqueName: \"kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4\") pod \"bb408430-63e9-4e6b-b19c-e90d594925a2\" (UID: \"bb408430-63e9-4e6b-b19c-e90d594925a2\") "
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.404332 4868 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/bb408430-63e9-4e6b-b19c-e90d594925a2-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.419640 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4" (OuterVolumeSpecName: "kube-api-access-xljz4") pod "bb408430-63e9-4e6b-b19c-e90d594925a2" (UID: "bb408430-63e9-4e6b-b19c-e90d594925a2"). InnerVolumeSpecName "kube-api-access-xljz4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:17:52 crc kubenswrapper[4868]: I1003 14:17:52.505604 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xljz4\" (UniqueName: \"kubernetes.io/projected/bb408430-63e9-4e6b-b19c-e90d594925a2-kube-api-access-xljz4\") on node \"crc\" DevicePath \"\""
Oct 03 14:17:53 crc kubenswrapper[4868]: I1003 14:17:53.275831 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r" event={"ID":"bb408430-63e9-4e6b-b19c-e90d594925a2","Type":"ContainerDied","Data":"e1fe9a71460fceaf4cfe633bab04522b9e4d164055285305cd9f13b0ce2a9276"}
Oct 03 14:17:53 crc kubenswrapper[4868]: I1003 14:17:53.275869 4868 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e1fe9a71460fceaf4cfe633bab04522b9e4d164055285305cd9f13b0ce2a9276"
Oct 03 14:17:53 crc kubenswrapper[4868]: I1003 14:17:53.275905 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-5tv6r"
Oct 03 14:17:55 crc kubenswrapper[4868]: I1003 14:17:55.909751 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-5tv6r"]
Oct 03 14:17:55 crc kubenswrapper[4868]: I1003 14:17:55.919837 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-5tv6r"]
Oct 03 14:17:56 crc kubenswrapper[4868]: I1003 14:17:56.544449 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:17:56 crc kubenswrapper[4868]: E1003 14:17:56.544940 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:17:56 crc kubenswrapper[4868]: I1003 14:17:56.556574 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bb408430-63e9-4e6b-b19c-e90d594925a2" path="/var/lib/kubelet/pods/bb408430-63e9-4e6b-b19c-e90d594925a2/volumes"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.091639 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-gnc8l"]
Oct 03 14:17:57 crc kubenswrapper[4868]: E1003 14:17:57.092373 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bb408430-63e9-4e6b-b19c-e90d594925a2" containerName="container-00"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.092393 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="bb408430-63e9-4e6b-b19c-e90d594925a2" containerName="container-00"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.092669 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="bb408430-63e9-4e6b-b19c-e90d594925a2" containerName="container-00"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.093488 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.191932 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28p7h\" (UniqueName: \"kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.192426 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.294039 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28p7h\" (UniqueName: \"kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.294192 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.294446 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.317436 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28p7h\" (UniqueName: \"kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h\") pod \"crc-debug-gnc8l\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") " pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:57 crc kubenswrapper[4868]: I1003 14:17:57.422561 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:58 crc kubenswrapper[4868]: I1003 14:17:58.322336 4868 generic.go:334] "Generic (PLEG): container finished" podID="fdebc129-61c7-4e01-9acd-8a2995c9b6b1" containerID="afef6db17ca1d746b2ab440e27dd296aaccc1bf74394fcf4089786a60309e141" exitCode=0
Oct 03 14:17:58 crc kubenswrapper[4868]: I1003 14:17:58.322379 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l" event={"ID":"fdebc129-61c7-4e01-9acd-8a2995c9b6b1","Type":"ContainerDied","Data":"afef6db17ca1d746b2ab440e27dd296aaccc1bf74394fcf4089786a60309e141"}
Oct 03 14:17:58 crc kubenswrapper[4868]: I1003 14:17:58.322417 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l" event={"ID":"fdebc129-61c7-4e01-9acd-8a2995c9b6b1","Type":"ContainerStarted","Data":"6eb9560de31162ec283cbb5e21956b84e4628ea327e9b1cf729d03dde4fe07f7"}
Oct 03 14:17:58 crc kubenswrapper[4868]: I1003 14:17:58.380319 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-gnc8l"]
Oct 03 14:17:58 crc kubenswrapper[4868]: I1003 14:17:58.387815 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wlxzl/crc-debug-gnc8l"]
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.443031 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.536816 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28p7h\" (UniqueName: \"kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h\") pod \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") "
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.536927 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host\") pod \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\" (UID: \"fdebc129-61c7-4e01-9acd-8a2995c9b6b1\") "
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.537020 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host" (OuterVolumeSpecName: "host") pod "fdebc129-61c7-4e01-9acd-8a2995c9b6b1" (UID: "fdebc129-61c7-4e01-9acd-8a2995c9b6b1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.537657 4868 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-host\") on node \"crc\" DevicePath \"\""
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.548339 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h" (OuterVolumeSpecName: "kube-api-access-28p7h") pod "fdebc129-61c7-4e01-9acd-8a2995c9b6b1" (UID: "fdebc129-61c7-4e01-9acd-8a2995c9b6b1"). InnerVolumeSpecName "kube-api-access-28p7h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Oct 03 14:17:59 crc kubenswrapper[4868]: I1003 14:17:59.641222 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28p7h\" (UniqueName: \"kubernetes.io/projected/fdebc129-61c7-4e01-9acd-8a2995c9b6b1-kube-api-access-28p7h\") on node \"crc\" DevicePath \"\""
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.030516 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/util/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.259456 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/util/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.262417 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/pull/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.269826 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/pull/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.344973 4868 scope.go:117] "RemoveContainer" containerID="afef6db17ca1d746b2ab440e27dd296aaccc1bf74394fcf4089786a60309e141"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.345084 4868 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-wlxzl/crc-debug-gnc8l"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.434205 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/util/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.458780 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/pull/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.476754 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_6866606fa3a289e0b44cd13ac7038d6356f0a6aa62e0445808c76e969akk4d7_f93542ca-2418-4cd7-ade7-78a83fab6088/extract/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.554799 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fdebc129-61c7-4e01-9acd-8a2995c9b6b1" path="/var/lib/kubelet/pods/fdebc129-61c7-4e01-9acd-8a2995c9b6b1/volumes"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.633926 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-82mnx_5c06b85f-d6da-4e5f-a817-f01a18b0217c/kube-rbac-proxy/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.661311 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-6pz2t_9efa809d-5837-4900-a456-84edfb2ba501/kube-rbac-proxy/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.746293 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-6c675fb79f-82mnx_5c06b85f-d6da-4e5f-a817-f01a18b0217c/manager/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.897929 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-79d68d6c85-6pz2t_9efa809d-5837-4900-a456-84edfb2ba501/manager/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.928358 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-nmp74_3d34922b-c3d5-4795-be9c-a39e2542f42d/kube-rbac-proxy/0.log"
Oct 03 14:18:00 crc kubenswrapper[4868]: I1003 14:18:00.975096 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-75dfd9b554-nmp74_3d34922b-c3d5-4795-be9c-a39e2542f42d/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.111801 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-txz24_ac74b0fc-4221-46e6-b88a-f9bd4a484952/kube-rbac-proxy/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.162031 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-846dff85b5-txz24_ac74b0fc-4221-46e6-b88a-f9bd4a484952/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.327722 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-r8szh_a9341a61-ad61-4ab0-8056-fea9a2e0644e/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.335871 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-599898f689-r8szh_a9341a61-ad61-4ab0-8056-fea9a2e0644e/kube-rbac-proxy/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.415942 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-8dh82_ed9b7dc9-9145-42db-bed4-c4cf3f22c07f/kube-rbac-proxy/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.519543 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-6769b867d9-8dh82_ed9b7dc9-9145-42db-bed4-c4cf3f22c07f/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.587171 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-m4cfh_6a879dee-0e96-4658-b0b2-ddfa08037b88/kube-rbac-proxy/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.760112 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-5fbf469cd7-m4cfh_6a879dee-0e96-4658-b0b2-ddfa08037b88/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.764902 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-lsdv9_96f01adb-73f9-45c4-bf04-677ffa2942e2/kube-rbac-proxy/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.977234 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-84bc9db6cc-lsdv9_96f01adb-73f9-45c4-bf04-677ffa2942e2/manager/0.log"
Oct 03 14:18:01 crc kubenswrapper[4868]: I1003 14:18:01.978294 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-mql26_f125746c-8bbb-499e-95e0-2dd8071d914e/kube-rbac-proxy/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.047500 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7f55849f88-mql26_f125746c-8bbb-499e-95e0-2dd8071d914e/manager/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.138539 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-fvf5f_10842e9e-e075-4399-88c9-96df14bf7959/kube-rbac-proxy/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.211925 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-6fd6854b49-fvf5f_10842e9e-e075-4399-88c9-96df14bf7959/manager/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.552275 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-lsvj8_1121fe0b-eb0d-43f2-b503-85a3a3601c7e/kube-rbac-proxy/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.608874 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-5c468bf4d4-lsvj8_1121fe0b-eb0d-43f2-b503-85a3a3601c7e/manager/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.616792 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-t7vpl_1f467387-a3f8-4b5b-af79-14eaf2bf799a/kube-rbac-proxy/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.774197 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-6574bf987d-t7vpl_1f467387-a3f8-4b5b-af79-14eaf2bf799a/manager/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.838561 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-jkjfz_4a104edd-a22b-4767-8124-0e1a0e87a999/kube-rbac-proxy/0.log"
Oct 03 14:18:02 crc kubenswrapper[4868]: I1003 14:18:02.873385 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-555c7456bd-jkjfz_4a104edd-a22b-4767-8124-0e1a0e87a999/manager/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.009791 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-8j6dq_756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22/kube-rbac-proxy/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.084651 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-59d6cfdf45-8j6dq_756c3a75-5ba1-4c3d-a0ae-f8b1cfe4ae22/manager/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.233681 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5_3f5c7d09-45c2-42bf-b441-70fc16504141/manager/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.270736 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-6f64c4d678qz4w5_3f5c7d09-45c2-42bf-b441-70fc16504141/kube-rbac-proxy/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.382694 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5c4446bf96-6xvjc_89d66ba3-fda2-467a-a2f2-402a8661155b/kube-rbac-proxy/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.605191 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764f84468b-lbhnl_1435bf3e-adba-43bb-97b4-2caea4a8c4c8/kube-rbac-proxy/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.813445 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-x22cd_3e46757a-79d0-458e-88d7-c91c6661321a/registry-server/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.849851 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-764f84468b-lbhnl_1435bf3e-adba-43bb-97b4-2caea4a8c4c8/operator/0.log"
Oct 03 14:18:03 crc kubenswrapper[4868]: I1003 14:18:03.998535 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-wbsqt_546f0324-867d-4a32-a8c2-5e72d95aff3b/kube-rbac-proxy/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.175524 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-49swg_72d193fe-4b1e-4c77-bda6-a44e1a8318b6/kube-rbac-proxy/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.240837 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-688db7b6c7-wbsqt_546f0324-867d-4a32-a8c2-5e72d95aff3b/manager/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.269968 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-7d8bb7f44c-49swg_72d193fe-4b1e-4c77-bda6-a44e1a8318b6/manager/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.435391 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-5f97d8c699-69bv7_97a59cbd-cfe1-49ce-9774-fc9bc76a52b0/operator/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.538533 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-j7pqs_5dbc41e1-0980-4e30-9531-233266c50aca/kube-rbac-proxy/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.709827 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-5c4446bf96-6xvjc_89d66ba3-fda2-467a-a2f2-402a8661155b/manager/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.804483 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-6859f9b676-j7pqs_5dbc41e1-0980-4e30-9531-233266c50aca/manager/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.817095 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zlj2b_b8907927-2a99-491c-9305-08f86cb8525d/kube-rbac-proxy/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.829491 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-5db5cf686f-zlj2b_b8907927-2a99-491c-9305-08f86cb8525d/manager/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.926171 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-zftwb_52c85570-4e33-49d4-b6df-a65727f1df56/kube-rbac-proxy/0.log"
Oct 03 14:18:04 crc kubenswrapper[4868]: I1003 14:18:04.975380 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5cd5cb47d7-zftwb_52c85570-4e33-49d4-b6df-a65727f1df56/manager/0.log"
Oct 03 14:18:05 crc kubenswrapper[4868]: I1003 14:18:05.063668 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-zhdvk_70e868cf-a7e6-4942-b88a-71cbd6a992af/kube-rbac-proxy/0.log"
Oct 03 14:18:05 crc kubenswrapper[4868]: I1003 14:18:05.194173 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-fcd7d9895-zhdvk_70e868cf-a7e6-4942-b88a-71cbd6a992af/manager/0.log"
Oct 03 14:18:07 crc kubenswrapper[4868]: I1003 14:18:07.544842 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:18:07 crc kubenswrapper[4868]: E1003 14:18:07.545489 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:18:17 crc kubenswrapper[4868]: I1003 14:18:17.761697 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="b55a39cc-8088-4a92-9976-b45a5e69ffd6" containerName="ceilometer-central-agent" probeResult="failure" output="command timed out"
Oct 03 14:18:19 crc kubenswrapper[4868]: I1003 14:18:19.990685 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack-operators/openstack-operator-index-x22cd" podUID="3e46757a-79d0-458e-88d7-c91c6661321a" containerName="registry-server" probeResult="failure" output=<
Oct 03 14:18:19 crc kubenswrapper[4868]: timeout: health rpc did not complete within 1s
Oct 03 14:18:19 crc kubenswrapper[4868]: >
Oct 03 14:18:20 crc kubenswrapper[4868]: I1003 14:18:20.458796 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pkh4z_4538295a-b047-4bba-999b-9d35082ad0c5/machine-api-operator/0.log"
Oct 03 14:18:20 crc kubenswrapper[4868]: I1003 14:18:20.544201 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:18:20 crc kubenswrapper[4868]: E1003 14:18:20.544430 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:18:22 crc kubenswrapper[4868]: I1003 14:18:22.761582 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="b55a39cc-8088-4a92-9976-b45a5e69ffd6" containerName="ceilometer-central-agent" probeResult="failure" output="command timed out"
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.612320 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-8ftk2_3e67dcd0-021f-4c7e-aaa5-351df73f9f2a/control-plane-machine-set-operator/0.log"
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.613078 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-pkh4z_4538295a-b047-4bba-999b-9d35082ad0c5/kube-rbac-proxy/0.log"
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.818438 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openstack/ceilometer-0" podUID="b55a39cc-8088-4a92-9976-b45a5e69ffd6" containerName="ceilometer-central-agent" probeResult="failure" output=<
Oct 03 14:18:24 crc kubenswrapper[4868]: Unkown error: Expecting value: line 1 column 1 (char 0)
Oct 03 14:18:24 crc kubenswrapper[4868]: >
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.818523 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack/ceilometer-0"
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.819283 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="ceilometer-central-agent" containerStatusID={"Type":"cri-o","ID":"0b51658f86f057f8b3230e295d2df222b4a124f83a02c49e0e2fb19b7df0623a"} pod="openstack/ceilometer-0" containerMessage="Container ceilometer-central-agent failed liveness probe, will be restarted"
Oct 03 14:18:24 crc kubenswrapper[4868]: I1003 14:18:24.819373 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b55a39cc-8088-4a92-9976-b45a5e69ffd6" containerName="ceilometer-central-agent" containerID="cri-o://0b51658f86f057f8b3230e295d2df222b4a124f83a02c49e0e2fb19b7df0623a" gracePeriod=30
Oct 03 14:18:26 crc kubenswrapper[4868]: I1003 14:18:26.421354 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Oct 03 14:18:26 crc kubenswrapper[4868]: I1003 14:18:26.572869 4868 generic.go:334] "Generic (PLEG): container finished" podID="b55a39cc-8088-4a92-9976-b45a5e69ffd6" containerID="0b51658f86f057f8b3230e295d2df222b4a124f83a02c49e0e2fb19b7df0623a" exitCode=0
Oct 03 14:18:26 crc kubenswrapper[4868]: I1003 14:18:26.572951 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerDied","Data":"0b51658f86f057f8b3230e295d2df222b4a124f83a02c49e0e2fb19b7df0623a"}
Oct 03 14:18:29 crc kubenswrapper[4868]: I1003 14:18:29.601679 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b55a39cc-8088-4a92-9976-b45a5e69ffd6","Type":"ContainerStarted","Data":"014ff6de9ba3594ffaa94522a4fe412d1bf09bf0498d7d54c718d0513328338e"}
Oct 03 14:18:34 crc kubenswrapper[4868]: I1003 14:18:34.828556 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-24d25_879cd6ef-837a-4622-b311-ba67498835f7/cert-manager-controller/0.log"
Oct 03 14:18:35 crc kubenswrapper[4868]: I1003 14:18:35.063016 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-hl8ss_46746d8f-da2f-4a50-b092-76ac67fa11cb/cert-manager-cainjector/0.log"
Oct 03 14:18:35 crc kubenswrapper[4868]: I1003 14:18:35.107309 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-8l8xn_9a8014e5-4176-4f00-bd52-6cc60d8995b0/cert-manager-webhook/0.log"
Oct 03 14:18:35 crc kubenswrapper[4868]: I1003 14:18:35.543827 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:18:35 crc kubenswrapper[4868]: E1003 14:18:35.544308 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:18:45 crc kubenswrapper[4868]: I1003 14:18:45.882905 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-6b874cbd85-tjlzp_e0c4f245-e33e-43f7-9a23-dd6de5ffd531/nmstate-console-plugin/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.047809 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-nznm4_a3945860-da06-49b9-b1fb-824d976dbcb5/nmstate-handler/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.101383 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-vlb8q_6f27da3a-f372-4a3f-a88d-a99db0ade467/kube-rbac-proxy/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.125116 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-fdff9cb8d-vlb8q_6f27da3a-f372-4a3f-a88d-a99db0ade467/nmstate-metrics/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.311238 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-858ddd8f98-7zvh7_2784c314-41c6-4132-a03d-b54844ffd96e/nmstate-operator/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.398117 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-6cdbc54649-ksvpj_55f22459-1eaa-4ce8-bd82-6b3c62c57d80/nmstate-webhook/0.log"
Oct 03 14:18:46 crc kubenswrapper[4868]: I1003 14:18:46.546325 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:18:46 crc kubenswrapper[4868]: E1003 14:18:46.546566 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:18:57 crc kubenswrapper[4868]: I1003 14:18:57.544342 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:18:57 crc kubenswrapper[4868]: E1003 14:18:57.545354 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:18:59 crc kubenswrapper[4868]: I1003 14:18:59.703576 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-v8khj_8f841000-9d62-4031-ace5-fd99a8d1409a/kube-rbac-proxy/0.log"
Oct 03 14:18:59 crc kubenswrapper[4868]: I1003 14:18:59.805704 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-68d546b9d8-v8khj_8f841000-9d62-4031-ace5-fd99a8d1409a/controller/0.log"
Oct 03 14:18:59 crc kubenswrapper[4868]: I1003 14:18:59.952992 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-frr-files/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.101494 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-frr-files/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.111210 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-reloader/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.128027 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-metrics/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.157785 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-reloader/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.339655 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-metrics/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.363097 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-reloader/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.379437 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-metrics/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.387515 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-frr-files/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.617123 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-metrics/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.627158 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-frr-files/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.642294 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/cp-reloader/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.668777 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/controller/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.830512 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/frr-metrics/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.836835 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/kube-rbac-proxy/0.log"
Oct 03 14:19:00 crc kubenswrapper[4868]: I1003 14:19:00.901163 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/kube-rbac-proxy-frr/0.log"
Oct 03 14:19:01 crc kubenswrapper[4868]: I1003 14:19:01.063821 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/reloader/0.log"
Oct 03 14:19:01 crc kubenswrapper[4868]: I1003 14:19:01.202149 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-64bf5d555-vhc7k_1c72f7f5-4ce6-4b3a-8d99-592c1c809f9a/frr-k8s-webhook-server/0.log"
Oct 03 14:19:01 crc kubenswrapper[4868]: I1003 14:19:01.430186 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-6bc4b54f6f-wtfqc_22faf835-b22c-40ca-b38b-d3749dd60a3c/manager/0.log"
Oct 03 14:19:01 crc kubenswrapper[4868]: I1003 14:19:01.469522 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-f7bdd5677-d29dv_37f71aac-03c4-41ee-9f10-27cc8a5bcbfb/webhook-server/0.log"
Oct 03 14:19:01 crc kubenswrapper[4868]: I1003 14:19:01.846770 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-s7m4r_7210f599-a063-4c96-80ad-44928756136c/kube-rbac-proxy/0.log"
Oct 03 14:19:02 crc kubenswrapper[4868]: I1003 14:19:02.146316 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-gnnv9_7d68d329-fabf-44a2-a134-c5ccfe0ddf96/frr/0.log"
Oct 03 14:19:02 crc kubenswrapper[4868]: I1003 14:19:02.378452 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-s7m4r_7210f599-a063-4c96-80ad-44928756136c/speaker/0.log"
Oct 03 14:19:09 crc kubenswrapper[4868]: I1003 14:19:09.544563 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d"
Oct 03 14:19:09 crc kubenswrapper[4868]: E1003 14:19:09.545398 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6"
Oct 03 14:19:13 crc kubenswrapper[4868]: I1003 14:19:13.749544 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/util/0.log"
Oct 03 14:19:13 crc kubenswrapper[4868]: I1003 14:19:13.985396 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/pull/0.log"
Oct 03 14:19:13 crc kubenswrapper[4868]: I1003 14:19:13.987280 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/util/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.076305 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/pull/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.180426 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/util/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.214649 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/pull/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.245522 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_8f2f4ee801e5826a37d84a7b1fc4ccbf6b79de668302737d0f1152d8d28mtnt_09388f41-23a9-4759-b318-4694ef98e81a/extract/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.424178 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-utilities/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.537190 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-content/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.564001 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-utilities/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.574160 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-content/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.771405 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-content/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.804351 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/extract-utilities/0.log"
Oct 03 14:19:14 crc kubenswrapper[4868]: I1003 14:19:14.998617 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-utilities/0.log"
Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.305982 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-m4wqp_e8238c1e-4090-4e0b-b833-621b910b8879/registry-server/0.log"
Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.395450 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-utilities/0.log"
Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.472387 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-content/0.log"
Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.507613 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-content/0.log"
Oct 03 14:19:15
crc kubenswrapper[4868]: I1003 14:19:15.701577 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-content/0.log" Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.707678 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/extract-utilities/0.log" Oct 03 14:19:15 crc kubenswrapper[4868]: I1003 14:19:15.991963 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/util/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.132043 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/pull/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.138594 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/util/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.242108 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/pull/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.432891 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/pull/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.541699 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/extract/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.544316 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-5kjld_feb39d54-b39f-4b3e-b010-02e87203341a/registry-server/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.556220 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_fa9831ede5d93c33d525b70ce6ddf94e500d80992af75a3305fe98835czndnv_a1d6ff21-7d1b-46b1-9b66-4b15fcf46120/util/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.778126 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-rpjpr_5f135c25-3c52-475a-9833-042496477d82/marketplace-operator/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.786849 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-utilities/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.957825 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-utilities/0.log" Oct 03 14:19:16 crc kubenswrapper[4868]: I1003 14:19:16.961128 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-content/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.019243 4868 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-content/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.210744 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-content/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.310448 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/extract-utilities/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.444070 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-bffqk_bc3e1be6-720c-4877-ab1f-a889f6eeb9fa/registry-server/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.526201 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-utilities/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.619266 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-content/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.660380 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-content/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.660456 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-utilities/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.821218 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-utilities/0.log" Oct 03 14:19:17 crc kubenswrapper[4868]: I1003 14:19:17.850483 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/extract-content/0.log" Oct 03 14:19:18 crc kubenswrapper[4868]: I1003 14:19:18.773190 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-r767z_c99cb29f-a3c3-4085-a7ea-596646e293f1/registry-server/0.log" Oct 03 14:19:23 crc kubenswrapper[4868]: I1003 14:19:23.544187 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:19:23 crc kubenswrapper[4868]: E1003 14:19:23.546131 4868 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-kbwqg_openshift-machine-config-operator(71ca0541-cbbf-4390-b90e-f068349a51f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" Oct 03 14:19:38 crc kubenswrapper[4868]: I1003 14:19:38.544495 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:19:39 crc kubenswrapper[4868]: I1003 14:19:39.261184 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" 
event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"61d80d0c45c950b5f5b2d0806fd638aa6e53aef4f4b79e2904a8818156c0d134"} Oct 03 14:19:49 crc kubenswrapper[4868]: E1003 14:19:49.297778 4868 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.58:59246->38.102.83.58:43049: write tcp 38.102.83.58:59246->38.102.83.58:43049: write: broken pipe Oct 03 14:21:16 crc kubenswrapper[4868]: I1003 14:21:16.141975 4868 generic.go:334] "Generic (PLEG): container finished" podID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerID="337a092bcd958ae720c1060960f3a364e95411fe60fc35b2bc0328b623ef661f" exitCode=0 Oct 03 14:21:16 crc kubenswrapper[4868]: I1003 14:21:16.142086 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" event={"ID":"25b4e0ac-2921-4866-be4f-d2a53a0f7e43","Type":"ContainerDied","Data":"337a092bcd958ae720c1060960f3a364e95411fe60fc35b2bc0328b623ef661f"} Oct 03 14:21:16 crc kubenswrapper[4868]: I1003 14:21:16.143017 4868 scope.go:117] "RemoveContainer" containerID="337a092bcd958ae720c1060960f3a364e95411fe60fc35b2bc0328b623ef661f" Oct 03 14:21:17 crc kubenswrapper[4868]: I1003 14:21:17.138071 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wlxzl_must-gather-x5t7b_25b4e0ac-2921-4866-be4f-d2a53a0f7e43/gather/0.log" Oct 03 14:21:25 crc kubenswrapper[4868]: I1003 14:21:25.934120 4868 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-wlxzl/must-gather-x5t7b"] Oct 03 14:21:25 crc kubenswrapper[4868]: I1003 14:21:25.934982 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="copy" containerID="cri-o://ae54427f10096078181ae494f99670c8a1abeaa2fb10a9138c42f005b8c638e4" gracePeriod=2 Oct 03 14:21:25 crc kubenswrapper[4868]: I1003 14:21:25.952737 4868 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-wlxzl/must-gather-x5t7b"] Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.222567 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wlxzl_must-gather-x5t7b_25b4e0ac-2921-4866-be4f-d2a53a0f7e43/copy/0.log" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.225323 4868 generic.go:334] "Generic (PLEG): container finished" podID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerID="ae54427f10096078181ae494f99670c8a1abeaa2fb10a9138c42f005b8c638e4" exitCode=143 Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.411582 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wlxzl_must-gather-x5t7b_25b4e0ac-2921-4866-be4f-d2a53a0f7e43/copy/0.log" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.411939 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.477194 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output\") pod \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.477533 4868 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tns2k\" (UniqueName: \"kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k\") pod \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\" (UID: \"25b4e0ac-2921-4866-be4f-d2a53a0f7e43\") " Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.483428 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k" (OuterVolumeSpecName: "kube-api-access-tns2k") pod "25b4e0ac-2921-4866-be4f-d2a53a0f7e43" (UID: "25b4e0ac-2921-4866-be4f-d2a53a0f7e43"). InnerVolumeSpecName "kube-api-access-tns2k". PluginName "kubernetes.io/projected", VolumeGidValue "" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.579603 4868 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tns2k\" (UniqueName: \"kubernetes.io/projected/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-kube-api-access-tns2k\") on node \"crc\" DevicePath \"\"" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.624930 4868 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "25b4e0ac-2921-4866-be4f-d2a53a0f7e43" (UID: "25b4e0ac-2921-4866-be4f-d2a53a0f7e43"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Oct 03 14:21:26 crc kubenswrapper[4868]: I1003 14:21:26.681345 4868 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b4e0ac-2921-4866-be4f-d2a53a0f7e43-must-gather-output\") on node \"crc\" DevicePath \"\"" Oct 03 14:21:27 crc kubenswrapper[4868]: I1003 14:21:27.236662 4868 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-wlxzl_must-gather-x5t7b_25b4e0ac-2921-4866-be4f-d2a53a0f7e43/copy/0.log" Oct 03 14:21:27 crc kubenswrapper[4868]: I1003 14:21:27.237336 4868 scope.go:117] "RemoveContainer" containerID="ae54427f10096078181ae494f99670c8a1abeaa2fb10a9138c42f005b8c638e4" Oct 03 14:21:27 crc kubenswrapper[4868]: I1003 14:21:27.237374 4868 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-wlxzl/must-gather-x5t7b" Oct 03 14:21:27 crc kubenswrapper[4868]: I1003 14:21:27.262609 4868 scope.go:117] "RemoveContainer" containerID="337a092bcd958ae720c1060960f3a364e95411fe60fc35b2bc0328b623ef661f" Oct 03 14:21:28 crc kubenswrapper[4868]: I1003 14:21:28.569239 4868 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" path="/var/lib/kubelet/pods/25b4e0ac-2921-4866-be4f-d2a53a0f7e43/volumes" Oct 03 14:22:02 crc kubenswrapper[4868]: I1003 14:22:02.145436 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:22:02 crc kubenswrapper[4868]: I1003 14:22:02.145845 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:22:17 crc kubenswrapper[4868]: I1003 14:22:17.725924 4868 scope.go:117] "RemoveContainer" containerID="a530e62c46d9de16036d1e77ec0292f838916fb935b43e9d21493c77c211d871" Oct 03 14:22:32 crc kubenswrapper[4868]: I1003 14:22:32.145986 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:22:32 crc kubenswrapper[4868]: I1003 14:22:32.146490 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:23:02 crc kubenswrapper[4868]: I1003 14:23:02.145506 4868 patch_prober.go:28] interesting pod/machine-config-daemon-kbwqg container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Oct 03 14:23:02 crc kubenswrapper[4868]: I1003 14:23:02.146126 4868 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Oct 03 14:23:02 crc kubenswrapper[4868]: I1003 14:23:02.146190 4868 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" Oct 03 14:23:02 crc kubenswrapper[4868]: I1003 14:23:02.146892 4868 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"61d80d0c45c950b5f5b2d0806fd638aa6e53aef4f4b79e2904a8818156c0d134"} pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" containerMessage="Container machine-config-daemon failed liveness probe, will 
be restarted" Oct 03 14:23:02 crc kubenswrapper[4868]: I1003 14:23:02.146938 4868 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" podUID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerName="machine-config-daemon" containerID="cri-o://61d80d0c45c950b5f5b2d0806fd638aa6e53aef4f4b79e2904a8818156c0d134" gracePeriod=600 Oct 03 14:23:03 crc kubenswrapper[4868]: I1003 14:23:03.082692 4868 generic.go:334] "Generic (PLEG): container finished" podID="71ca0541-cbbf-4390-b90e-f068349a51f6" containerID="61d80d0c45c950b5f5b2d0806fd638aa6e53aef4f4b79e2904a8818156c0d134" exitCode=0 Oct 03 14:23:03 crc kubenswrapper[4868]: I1003 14:23:03.082794 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerDied","Data":"61d80d0c45c950b5f5b2d0806fd638aa6e53aef4f4b79e2904a8818156c0d134"} Oct 03 14:23:03 crc kubenswrapper[4868]: I1003 14:23:03.083219 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-kbwqg" event={"ID":"71ca0541-cbbf-4390-b90e-f068349a51f6","Type":"ContainerStarted","Data":"fa8e6b551002fe3e26aa6a905b0ddde00e384eeb44da0b165a65eccccc35813b"} Oct 03 14:23:03 crc kubenswrapper[4868]: I1003 14:23:03.083243 4868 scope.go:117] "RemoveContainer" containerID="3ce5d6704617e4cd0ad631bcb157d8c3c6ab1d2c4573af7ad9d941988cb87f7d" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.631284 4868 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mlpvh"] Oct 03 14:23:47 crc kubenswrapper[4868]: E1003 14:23:47.632411 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="copy" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632429 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="copy" Oct 03 14:23:47 crc kubenswrapper[4868]: E1003 14:23:47.632455 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdebc129-61c7-4e01-9acd-8a2995c9b6b1" containerName="container-00" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632463 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdebc129-61c7-4e01-9acd-8a2995c9b6b1" containerName="container-00" Oct 03 14:23:47 crc kubenswrapper[4868]: E1003 14:23:47.632497 4868 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="gather" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632505 4868 state_mem.go:107] "Deleted CPUSet assignment" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="gather" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632723 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="gather" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632740 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="25b4e0ac-2921-4866-be4f-d2a53a0f7e43" containerName="copy" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.632757 4868 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdebc129-61c7-4e01-9acd-8a2995c9b6b1" containerName="container-00" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.634826 4868 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.648176 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlpvh"] Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.669642 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-catalog-content\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.669736 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-utilities\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.669788 4868 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6tfkf\" (UniqueName: \"kubernetes.io/projected/dac9cb79-6a24-470b-8358-e27696b4c5df-kube-api-access-6tfkf\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.771934 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-utilities\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.772075 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6tfkf\" (UniqueName: \"kubernetes.io/projected/dac9cb79-6a24-470b-8358-e27696b4c5df-kube-api-access-6tfkf\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.772146 4868 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-catalog-content\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.772686 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-utilities\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.772702 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dac9cb79-6a24-470b-8358-e27696b4c5df-catalog-content\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.793178 4868 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-6tfkf\" (UniqueName: \"kubernetes.io/projected/dac9cb79-6a24-470b-8358-e27696b4c5df-kube-api-access-6tfkf\") pod \"redhat-operators-mlpvh\" (UID: \"dac9cb79-6a24-470b-8358-e27696b4c5df\") " pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:47 crc kubenswrapper[4868]: I1003 14:23:47.972792 4868 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mlpvh" Oct 03 14:23:48 crc kubenswrapper[4868]: I1003 14:23:48.479735 4868 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mlpvh"] Oct 03 14:23:48 crc kubenswrapper[4868]: I1003 14:23:48.514657 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlpvh" event={"ID":"dac9cb79-6a24-470b-8358-e27696b4c5df","Type":"ContainerStarted","Data":"ffd84ff2c125956748799646a5b656a4ee957f9a3058e3a43c7f65bcaa0a9ca1"} Oct 03 14:23:49 crc kubenswrapper[4868]: I1003 14:23:49.527644 4868 generic.go:334] "Generic (PLEG): container finished" podID="dac9cb79-6a24-470b-8358-e27696b4c5df" containerID="165134da396aa573f96b9e83f0819cd4a3e3f6f85151ebc2085f7cac6214835d" exitCode=0 Oct 03 14:23:49 crc kubenswrapper[4868]: I1003 14:23:49.528449 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlpvh" event={"ID":"dac9cb79-6a24-470b-8358-e27696b4c5df","Type":"ContainerDied","Data":"165134da396aa573f96b9e83f0819cd4a3e3f6f85151ebc2085f7cac6214835d"} Oct 03 14:23:49 crc kubenswrapper[4868]: I1003 14:23:49.531193 4868 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Oct 03 14:23:51 crc kubenswrapper[4868]: I1003 14:23:51.551418 4868 generic.go:334] "Generic (PLEG): container finished" podID="dac9cb79-6a24-470b-8358-e27696b4c5df" containerID="ee608dad327b0614acd24abad20106c8331e1cd6e027a8876bada50ea2f4f36e" exitCode=0 Oct 03 14:23:51 crc kubenswrapper[4868]: I1003 14:23:51.551664 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlpvh" event={"ID":"dac9cb79-6a24-470b-8358-e27696b4c5df","Type":"ContainerDied","Data":"ee608dad327b0614acd24abad20106c8331e1cd6e027a8876bada50ea2f4f36e"} Oct 03 14:23:52 crc kubenswrapper[4868]: I1003 14:23:52.559962 4868 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mlpvh" event={"ID":"dac9cb79-6a24-470b-8358-e27696b4c5df","Type":"ContainerStarted","Data":"25f0648c466dc29b8985560e1f80c3f68ab6310508e6d0a4643bff6249a91cdc"} Oct 03 14:23:52 crc kubenswrapper[4868]: I1003 14:23:52.583039 4868 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mlpvh" podStartSLOduration=3.144810451 podStartE2EDuration="5.58301705s" podCreationTimestamp="2025-10-03 14:23:47 +0000 UTC" firstStartedPulling="2025-10-03 14:23:49.530931074 +0000 UTC m=+5625.740780140" lastFinishedPulling="2025-10-03 14:23:51.969137673 +0000 UTC m=+5628.178986739" observedRunningTime="2025-10-03 14:23:52.579888477 +0000 UTC m=+5628.789737563" watchObservedRunningTime="2025-10-03 14:23:52.58301705 +0000 UTC m=+5628.792866116" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515067756201024456 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015067756202017374 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015067742616016523 5ustar 
corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015067742616015473 5ustar corecore